diff --git a/.github/release-drafter-python-client.yml b/.github/release-drafter-python-client.yml new file mode 100644 index 000000000..bef2932d1 --- /dev/null +++ b/.github/release-drafter-python-client.yml @@ -0,0 +1,9 @@ +_extends: kaskada:.github/release-drafter.yml + +name-template: Python $RESOLVED_VERSION +tag-template: python@v$RESOLVED_VERSION +tag-prefix: python@v + +# Only include PRs with one of these labels +include-labels: + - python \ No newline at end of file diff --git a/.github/release-drafter-python.yml b/.github/release-drafter-python.yml index bef2932d1..7e790b50c 100644 --- a/.github/release-drafter-python.yml +++ b/.github/release-drafter-python.yml @@ -1,9 +1,6 @@ _extends: kaskada:.github/release-drafter.yml -name-template: Python $RESOLVED_VERSION -tag-template: python@v$RESOLVED_VERSION -tag-prefix: python@v - -# Only include PRs with one of these labels -include-labels: - - python \ No newline at end of file +name-template: Kaskada $RESOLVED_VERSION-a.0 +tag-template: v$RESOLVED_VERSION-a.0 +tag-prefix: v +include-pre-releases: true \ No newline at end of file diff --git a/.github/workflows/ci_python.yml b/.github/workflows/ci_python.yml new file mode 100644 index 000000000..080ccc726 --- /dev/null +++ b/.github/workflows/ci_python.yml @@ -0,0 +1,162 @@ +# This file was initially generated by maturin v1.2.0, using: +# +# maturin generate-ci github --pytest -o ../.github/workflows/ci_python.yml +# +# TODO: +# - `safety` +# - `typeguard` +# - `xdoctest` +# +# Currently, `mypy` and `pytest` are running as part of building the rules. +# We could (in theory) use a matrix to do these in parallel fetching the +# built wheels, but it doesn't seem likely to provide a significant speedup. +name: Python CI + +# Only one job per-ref +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +on: + push: + branches: + - main + tags: + - '*' + pull_request: + branches: + - main + merge_group: + branches: + - main + +defaults: + run: + shell: bash + working-directory: python + +permissions: + contents: read + +jobs: + # Run lint separately from the build -- it doesn't need the built Rust code. + # + # Also, only do it on one machine. 
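+  #
+  # A rough sketch for reproducing these checks locally (assuming `pipx` and
+  # the project's poetry setup are available; these mirror the commands the
+  # job runs below, from the repo's `python/` directory):
+  #
+  #   cd python
+  #   poetry install --only=main --only=lint
+  #   poetry run black --diff pysrc pytests docs/source
+  #   poetry run flake8 pysrc pytests docs/source
+  #   poetry run isort --filter-files --diff pysrc pytests docs/source
+  #   poetry run darglint pysrc
+  #   poetry run pydocstyle pysrc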
+ lint: + if: github.event_name == 'pull_request' || github.event_name == 'merge_group' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Install poetry + run: | + pipx install poetry + poetry config virtualenvs.create true --local + poetry config virtualenvs.in-project true --local + - uses: actions/setup-python@v4 + with: + python-version: 3.11 + cache: poetry + - name: install deps + run: | + poetry install --only=main --only=lint + - name: black + run: | + poetry run black --diff pysrc pytests docs/source + - name: flake8 + run: | + poetry run flake8 pysrc pytests docs/source + - name: isort + run: | + poetry run isort --filter-files --diff pysrc pytests docs/source + - name: darglint + run: | + poetry run darglint pysrc + - name: pydocstyle + run: | + poetry run pydocstyle pysrc + + debug: + if: | + github.event_name == 'pull_request' || + github.event_name == 'merge_group' || + (github.event_name == 'push' && github.ref == 'refs/heads/main') + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Install poetry + run: | + pipx install poetry + poetry config virtualenvs.create true --local + poetry config virtualenvs.in-project true --local + - uses: actions/setup-python@v4 + with: + python-version: | + 3.8 + 3.9 + 3.10 + 3.11 + cache: poetry + - name: Build debug wheels + uses: messense/maturin-action@v1 + with: + target: x86_64 + manylinux: 2_28 + args: --out dist --profile dev + working-directory: python + before-script-linux: | + set -e + dnf -y install clang protobuf-devel lld + clang --version + protoc --version + - name: pytest and mypy + run: | + for V in 3.8 3.9 3.10 3.11; do + echo "::group::Install for Python $V" + poetry env use $V + poetry env info + source $(poetry env info --path)/bin/activate + poetry install --only=test --only=typecheck + pip install 'kaskada[plot]>=0.6.0-a.0' --find-links dist --force-reinstall + echo "::endgroup::" + echo "::group::Test Python $V" + poetry run pytest + echo "::endgroup::" + echo "::group::MyPy Python $V" + poetry run mypy -- --install-types --non-interactive pysrc pytests + echo "::endgroup::" + deactivate + done + - name: Build docs + run: | + poetry env use 3.11 + source $(poetry env info --path)/bin/activate + poetry install --with=docs + pip install 'kaskada[plot]>=0.6.0-a.0' --find-links dist --force-reinstall + ls docs/source/_static + sphinx-build docs/source docs/_build -j auto -W + deactivate + - name: Upload docs + uses: actions/upload-pages-artifact@v2 + with: + # Automatically uploads an artifact from the './_site' directory by default + path: ${{ github.workspace }}/python/docs/_build + + docs-deploy: + # Deploy docs on push to main. 
+    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
+    # Deployment job
+    environment:
+      name: github-pages
+      url: ${{ steps.deployment.outputs.page_url }}
+
+    # Grant GITHUB_TOKEN the permissions required to make a Pages deployment
+    permissions:
+      pages: write      # to deploy to Pages
+      id-token: write   # to verify the deployment originates from an appropriate source
+
+    runs-on: ubuntu-latest
+    needs: [debug]
+    steps:
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v2
\ No newline at end of file
diff --git a/.github/workflows/release_drafter.yml b/.github/workflows/release_drafter.yml
index 540fef909..1181f6150 100644
--- a/.github/workflows/release_drafter.yml
+++ b/.github/workflows/release_drafter.yml
@@ -25,12 +25,24 @@ jobs:
           disable-autolabeler: true
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  python_client_draft:
+    runs-on: ubuntu-latest
+    concurrency:
+      group: python-client-release
+    steps:
+      - name: Draft Python Client release
+        uses: release-drafter/release-drafter@v5
+        with:
+          config-name: release-drafter-python-client.yml
+          disable-autolabeler: true
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   python_draft:
     runs-on: ubuntu-latest
     concurrency:
       group: python-release
     steps:
-      - name: Draft Python release
+      - name: Draft Release Notes
         uses: release-drafter/release-drafter@v5
         with:
           config-name: release-drafter-python.yml
diff --git a/.github/workflows/release_engine.yml b/.github/workflows/release_engine.yml
index 7a0c7f0bf..c7a8099f1 100644
--- a/.github/workflows/release_engine.yml
+++ b/.github/workflows/release_engine.yml
@@ -374,6 +374,7 @@ jobs:
 
       - name: Build and push Docker images for Jupyter Beta
         uses: docker/build-push-action@v4
+        if: startsWith(github.ref, 'refs/tags/engine@v') && contains(github.ref, 'beta')
         with:
           context: .
           platforms: linux/amd64, linux/arm64
diff --git a/.github/workflows/release_python.yml b/.github/workflows/release_python.yml
new file mode 100644
index 000000000..9b3114daf
--- /dev/null
+++ b/.github/workflows/release_python.yml
@@ -0,0 +1,309 @@
+# Release workflow for the Kaskada Python library.
+#
+# This flow is triggered by the creation of a new GitHub release.
+# Generally, the new release should be marked as a "Pre-Release".
+#
+# The creation of the release will cause this to determine the version
+# and build the Python wheels on multiple platforms, before combining
+# them and uploading the new Python library to PyPI.
+#
+# Ideas for improvement
+# ---------------------
+# 1. After releasing we should update the `latest` tag. We don't do this
+#    yet because the old version of the Kaskada Python client downloads
+#    the latest engine release.
+# 2. After releasing, we should update the version in the checked-in
+#    project configurations to be a `-dev` pre-release of the next version.
+# 3. We could add a step that uploads the wheels to Test PyPI for verification.
+# 4. We could build the wheels on merge to main, allowing the release process
+#    to retrieve them rather than building them. This would also let us do
+#    the bulk of the multi-platform testing regularly, as well as making it
+#    less likely that we'd need to recreate a release to deal with problems.
+# 5. Schedule a weekly, automatic release.
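+#
+# Versioning sketch (assuming the drafter's `tag-template: v$RESOLVED_VERSION-a.0`
+# from .github/release-drafter-python.yml): publishing a release tagged e.g.
+# `v0.6.0-a.0` gives the `version` job below
+#
+#   GITHUB_REF=refs/tags/v0.6.0-a.0
+#   VERSION=${GITHUB_REF#refs/tags/v}   # strips the prefix -> 0.6.0-a.0
+#
+# which it exposes as a job output that the build jobs stamp into
+# pyproject.toml and Cargo.toml via scripts/set_versions.py.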
+name: Python Release + +# Only one job per-ref +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +on: + release: + types: + - published + +defaults: + run: + shell: bash + working-directory: python + +permissions: + contents: read + +jobs: + version: + runs-on: ubuntu-latest + outputs: + version: ${{ steps.set-version.outputs.version }} + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: 3.11 + - name: Determine version (current) + if: github.event_name != 'release' + run: | + pip install tomlkit + VERSION=$(python ../scripts/get_version.py pyproject.toml project.version) + echo "VERSION=$VERSION" >> $GITHUB_ENV + - name: Determine version (release) + if: github.event_name == 'release' + run: | + VERSION=${GITHUB_REF#refs/tags/v} + echo "VERSION=$VERSION" >> $GITHUB_ENV + - name: Set version + id: set-version + run: | + echo "version=$VERSION" >> $GITHUB_OUTPUT + + build-wheel-macos: + # Build wheels during release. + if: github.event_name == 'release' + runs-on: macos-latest + needs: [version] + strategy: + matrix: + include: + - target: x86_64 + - target: universal2 + steps: + - uses: actions/checkout@v3 + - name: Install poetry + run: | + pipx install poetry + poetry config virtualenvs.create true --local + poetry config virtualenvs.in-project true --local + - uses: actions/setup-python@v4 + with: + python-version: | + 3.8 + 3.9 + 3.10 + 3.11 + architecture: x64 + cache: poetry + - name: Install Protoc + uses: arduino/setup-protoc@v1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - name: Set Version (For Release) + if: github.event_name == 'release' + run: | + pip install tomlkit + python ../scripts/set_versions.py ${{ needs.version.outputs.version }} \ + pyproject.toml:project.version \ + pyproject.toml:tool.poetry.version \ + Cargo.toml:package.version + - name: Build wheel + uses: messense/maturin-action@v1 + with: + target: ${{ matrix.target }} + args: --release --out dist --sdist + working-directory: python + - name: pytest and mypy + run: | + for V in 3.8 3.9 3.10 3.11; do + echo "::group::Install for Python $V" + poetry env use $V + source $(poetry env info --path)/bin/activate + poetry install --only=test --only=typecheck + pip install 'kaskada[plot]>=0.6.0-a.0' --find-links dist --force-reinstall + echo "::endgroup::" + echo "::group::Test Python $V" + poetry run pytest + echo "::endgroup::" + echo "::group::MyPy Python $V" + poetry run mypy -- --install-types --non-interactive pysrc pytests + echo "::endgroup::" + deactivate + done + - name: Upload wheels + uses: actions/upload-artifact@v2 + with: + name: wheels + path: ${{ github.workspace }}/python/dist + + build-wheel-windows: + if: github.event_name == 'release' + needs: [version] + runs-on: windows-latest + steps: + - uses: actions/checkout@v3 + - name: Install poetry + run: | + pipx install poetry + poetry config virtualenvs.create true --local + poetry config virtualenvs.in-project true --local + - uses: actions/setup-python@v4 + with: + python-version: 3.11 + architecture: x64 + cache: poetry + - name: Install Protoc + uses: arduino/setup-protoc@v1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - name: Set Version (For Release) + if: github.event_name == 'release' + run: | + pip install tomlkit + python ../scripts/set_versions.py ${{ needs.version.outputs.version }} \ + pyproject.toml:project.version \ + pyproject.toml:tool.poetry.version \ + Cargo.toml:package.version + - name: Build wheels + uses: 
messense/maturin-action@v1 + with: + target: x64 + args: --release --out dist + working-directory: python + - name: pytest and mypy (Windows x64) + shell: bash + run: | + echo "::group::Install for Python 3.11" + source $(poetry env info --path)\\Scripts\\activate + poetry install --only=test --only=typecheck + pip install 'kaskada[plot]>=0.6.0-a.0' --find-links dist --force-reinstall + echo "::endgroup::" + echo "::group::Test Python 3.11" + poetry run pytest + echo "::endgroup::" + echo "::group::MyPy Python 3.11" + poetry run mypy -- --install-types --non-interactive pysrc pytests + echo "::endgroup::" + deactivate + - name: Upload wheels + uses: actions/upload-artifact@v2 + with: + name: wheels + path: ${{ github.workspace }}/python/dist + + build-wheel-linux: + if: github.event_name == 'release' + needs: [version] + runs-on: ubuntu-latest + strategy: + matrix: + include: + - target: x86_64 + steps: + - uses: actions/checkout@v3 + - name: Install poetry + run: | + pipx install poetry + poetry config virtualenvs.create true --local + poetry config virtualenvs.in-project true --local + - uses: actions/setup-python@v4 + with: + python-version: | + 3.8 + 3.9 + 3.10 + 3.11 + cache: poetry + - name: Set Version (For Release) + if: github.event_name == 'release' + run: | + pip install tomlkit + python ../scripts/set_versions.py ${{ needs.version.outputs.version }} \ + pyproject.toml:project.version \ + pyproject.toml:tool.poetry.version \ + Cargo.toml:package.version + - name: Build wheels + uses: messense/maturin-action@v1 + with: + target: ${{ matrix.target }} + manylinux: 2_28 + args: --release --out dist + working-directory: python + before-script-linux: | + set -e + dnf -y install clang protobuf-devel lld + clang --version + protoc --version + - name: pytest and mypy (Linux x86_64) + if: matrix.target == 'x86_64' + run: | + for V in 3.8 3.9 3.10 3.11; do + echo "::group::Install for Python $V" + poetry env use $V + poetry env info + source $(poetry env info --path)/bin/activate + poetry install --only=test --only=typecheck + pip install 'kaskada[plot]>=0.6.0-a.0' --find-links dist --force-reinstall + echo "::endgroup::" + echo "::group::Test Python $V" + poetry run pytest + echo "::endgroup::" + echo "::group::MyPy Python $V" + poetry run mypy -- --install-types --non-interactive pysrc pytests + echo "::endgroup::" + deactivate + done + - name: Upload wheels + uses: actions/upload-artifact@v2 + with: + name: wheels + path: ${{ github.workspace }}/python/dist + + # Make the source distribution + sdist: + if: github.event_name == 'release' + needs: [version] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set Version (For Release) + if: github.event_name == 'release' + run: | + pip install tomlkit + python ../scripts/set_versions.py ${{ needs.version.outputs.version }} \ + pyproject.toml:project.version \ + pyproject.toml:tool.poetry.version \ + Cargo.toml:package.version + - name: Build sdist + uses: PyO3/maturin-action@v1 + with: + command: sdist + args: --out dist + working-directory: python + - name: Upload sdist + uses: actions/upload-artifact@v3 + with: + name: wheels + path: ${{ github.workspace }}/python/dist + + release: + name: Release + runs-on: ubuntu-latest + environment: pypi + needs: [build-wheel-linux, build-wheel-windows, build-wheel-macos, sdist] + permissions: + id-token: write + if: github.event_name == 'release' && github.event.action == 'published' + steps: + - uses: actions/download-artifact@v3 + with: + name: wheels + path: dist + + - uses: 
pypa/gh-action-pypi-publish@release/v1
+        # with:
+        #   repository-url: https://test.pypi.org/legacy/
+
+      - name: Publish release
+        # TODO: Add `--latest` when we stop having the python client download
+        # the latest version.
+        run: |
+          gh release edit ${{ github.ref_name }} \
+            --draft=false --discussion-category=Announcements
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 1fb60ef1c..9caf8ef45 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,12 +1,9 @@
 **/target
-**/*.rs.bk
 /.idea
 *.iml
 **/.*.pending-snap
-/catalog.json
-*.svg
-sparrow-main/tests/test-output
-sparrow-main/test-storage
+/crates/sparrow-main/tests/test-output
+/crates/sparrow-main/test-storage
 !/.vscode
 
 # ignore notice files copied to the wren folder during local builds
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index e6a5fd12f..7d674785f 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -94,7 +94,7 @@ Run `cargo build --release -p sparrow-main` to build a release (optimized) binar
 ### Testing & Building the API
 
 * ensure docker is running locally
-* run `make proto/generate` and `make ent/generate`. See the `./wren/README.md` for more info on those.
+* run `make proto/generate`, `make ent/generate`, and `make wren/generate-mocks`. See the `./wren/README.md` for more info on those.
 * run `make wren/test`
 
 ### Testing & Building the Python Client
@@ -129,6 +129,7 @@ After making code changes, `ctrl-c` in the services window and restart it.
 
 #### locally, with the local backend
 
+* run `make test/int/docker-up-dependencies-only` in one terminal window to get the dependencies up
 * run `make sparrow/run` in one terminal window to get the Engine service up
 * run `make wren/run` in a second terminal window to get the Manager service up
 * run `make test/int/run-api` in a third terminal window to run the integration tests
@@ -137,7 +138,7 @@ After making code changes, `ctrl-c` in the proper service window and restart it.
 
 #### locally, with the s3 backend
 
-* run `make test/int/docker-up-s3-only` in one terminal window to get the dependencies up
+* run `make test/int/docker-up-dependencies-only` in one terminal window to get the dependencies up
 * run `make sparrow/run-s3` in a second terminal window to get the Engine service up
 * run `make wren/run-s3` in a third terminal window to get the Manager service up
 * run `make test/int/run-api-s3` in a fourth terminal window to run the integration tests
@@ -157,4 +158,4 @@ After making code changes, `ctrl-c` in the proper service window and restart it.
   Provides the language server integration for Rust code.
 * * [Even Better TOML](https://marketplace.visualstudio.com/items?itemName=tamasfe.even-better-toml). Optional.
 * * [Cargo](https://marketplace.visualstudio.com/items?itemName=panicbit.cargo). Optional.
-* * [Crates](https://marketplace.visualstudio.com/items?itemName=serayuzgur.crates) Optional.
\ No newline at end of file
+* * [Crates](https://marketplace.visualstudio.com/items?itemName=serayuzgur.crates) Optional.
diff --git a/Cargo.lock b/Cargo.lock index c8143a93a..94b9e9e91 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" dependencies = [ "gimli", ] @@ -49,9 +49,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a" dependencies = [ "memchr", ] @@ -71,6 +71,12 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + [[package]] name = "android-tzdata" version = "0.1.1" @@ -133,9 +139,9 @@ dependencies = [ [[package]] name = "anstyle-wincon" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c" dependencies = [ "anstyle", "windows-sys 0.48.0", @@ -143,9 +149,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.72" +version = "1.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" dependencies = [ "backtrace", ] @@ -161,7 +167,7 @@ dependencies = [ "lazy_static", "libflate", "log", - "num-bigint 0.4.3", + "num-bigint 0.4.4", "quad-rand", "rand 0.8.5", "regex", @@ -172,7 +178,7 @@ dependencies = [ "thiserror", "typed-builder 0.10.0", "uuid 1.4.1", - "zerocopy 0.6.1", + "zerocopy 0.6.3", ] [[package]] @@ -460,7 +466,7 @@ dependencies = [ "async-lock", "async-task", "concurrent-queue", - "fastrand", + "fastrand 1.9.0", "futures-lite", "slab", ] @@ -496,15 +502,15 @@ dependencies = [ "polling", "rustix 0.37.23", "slab", - "socket2", + "socket2 0.4.9", "waker-fn", ] [[package]] name = "async-lock" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" dependencies = [ "event-listener", ] @@ -523,9 +529,9 @@ dependencies = [ [[package]] name = "async-once-cell" -version = "0.3.1" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72faff1fdc615a0199d7bf71e6f389af54d46a66e9beb5d76c39e48eda93ecce" +checksum = "9338790e78aa95a416786ec8389546c4b6a1dfc3dc36071ed9518a9413a542eb" [[package]] name = "async-process" @@ -592,7 +598,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -603,20 +609,20 @@ checksum = "ecc7ab41815b3c653ccd2978ec3255c81349336702dfdf62ee6f7069b12a3aae" [[package]] name = "async-trait" -version = "0.1.71" +version = "0.1.73" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" +checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] name = "asynchronous-codec" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06a0daa378f5fd10634e44b0a29b2a87b890657658e072a30d6f26e57ddee182" +checksum = "4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568" dependencies = [ "bytes", "futures-sink", @@ -640,17 +646,6 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1181e1e0d1fce796a03db1ae795d67167da795f9cf4a39c37589e85ef57f26d3" -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -692,9 +687,9 @@ dependencies = [ [[package]] name = "axum" -version = "0.6.19" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a1de45611fdb535bfde7b7de4fd54f4fd2b17b1737c0a59b69bf9b92074b8c" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" dependencies = [ "async-trait", "axum-core", @@ -737,9 +732,9 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.68" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" dependencies = [ "addr2line", "cc", @@ -770,11 +765,13 @@ checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" [[package]] name = "bigdecimal" -version = "0.3.1" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" +checksum = "454bca3db10617b88b566f205ed190aedb0e0e6dd4cad61d3988a72e8c5594cb" dependencies = [ - "num-bigint 0.4.3", + "autocfg", + "libm", + "num-bigint 0.4.4", "num-integer", "num-traits", "serde", @@ -791,9 +788,9 @@ dependencies = [ [[package]] name = "bindgen" -version = "0.64.0" +version = "0.65.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4" +checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5" dependencies = [ "bitflags 1.3.2", "cexpr", @@ -801,12 +798,13 @@ dependencies = [ "lazy_static", "lazycell", "peeking_take_while", + "prettyplease 0.2.12", "proc-macro2", "quote", "regex", "rustc-hash", "shlex", - "syn 1.0.109", + "syn 2.0.29", ] [[package]] @@ -832,9 +830,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" [[package]] name = "bitvec" @@ -868,7 +866,7 @@ dependencies = [ "async-lock", "async-task", "atomic-waker", - "fastrand", + "fastrand 1.9.0", "futures-lite", "log", ] @@ -912,7 +910,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" dependencies = [ "memchr", - "regex-automata 0.3.3", + "regex-automata 0.3.6", "serde", ] @@ -953,11 +951,12 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.79" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ "jobserver", + "libc", ] [[package]] @@ -1068,21 +1067,9 @@ dependencies = [ [[package]] name = "clap" -version = "3.2.25" +version = "4.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" -dependencies = [ - "bitflags 1.3.2", - "clap_lex 0.2.4", - "indexmap 1.9.3", - "textwrap", -] - -[[package]] -name = "clap" -version = "4.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f644d0dac522c8b05ddc39aaaccc5b136d5dc4ff216610c5641e3be5becf56c" +checksum = "fb690e81c7840c0d7aade59f242ea3b41b9bc27bcd5997890e7702ae4b32e487" dependencies = [ "clap_builder", "clap_derive", @@ -1091,13 +1078,13 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.3.15" +version = "4.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af410122b9778e024f9e0fb35682cc09cc3f85cad5e8d3ba8f47a9702df6e73d" +checksum = "5ed2e96bc16d8d740f6f48d663eddf4b8a0983e79210fd55479b7bcd0a69860e" dependencies = [ "anstream", "anstyle", - "clap_lex 0.5.0", + "clap_lex", "strsim", ] @@ -1110,16 +1097,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.26", -] - -[[package]] -name = "clap_lex" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" -dependencies = [ - "os_str_bytes", + "syn 2.0.29", ] [[package]] @@ -1235,6 +1213,17 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +[[package]] +name = "core_affinity" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622892f5635ce1fc38c8f16dfc938553ed64af482edb5e150bf4caedbfcb2304" +dependencies = [ + "libc", + "num_cpus", + "winapi", +] + [[package]] name = "cpu-time" version = "1.0.0" @@ -1295,20 +1284,20 @@ dependencies = [ [[package]] name = "criterion" -version = "0.4.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" dependencies = [ "anes", - "atty", "cast", "ciborium", - "clap 3.2.25", + "clap", "criterion-plot", "futures", - "itertools", - "lazy_static", + "is-terminal", + "itertools 0.10.5", "num-traits", + "once_cell", "oorandom", "regex", "serde", @@ -1326,14 +1315,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", - "itertools", + "itertools 0.10.5", ] [[package]] name = "critical-section" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "6548a0ad5d2549e111e1f6a11a6c2e2d00ce6a3dafe22948d67c2b443f775e52" +checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216" [[package]] name = "crossbeam-channel" @@ -1393,9 +1382,9 @@ dependencies = [ [[package]] name = "dashmap" -version = "5.5.0" +version = "5.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6943ae99c34386c84a470c499d3414f66502a41340aa895406e0d2e4a207b91d" +checksum = "edd72493923899c6f10c641bdbdeddc7183d6396641d99c1a0d1597f37f92e28" dependencies = [ "cfg-if", "hashbrown 0.14.0", @@ -1437,9 +1426,9 @@ dependencies = [ [[package]] name = "deunicode" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "850878694b7933ca4c9569d30a34b55031b9b139ee1fc7b94a527c4ef960d690" +checksum = "d95203a6a50906215a502507c0f879a0ce7ff205a6111e2db2a5ef8e4bb92e43" [[package]] name = "diff" @@ -1525,9 +1514,9 @@ dependencies = [ [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "ena" @@ -1552,9 +1541,9 @@ checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "encoding_rs" -version = "0.8.32" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ "cfg-if", ] @@ -1568,27 +1557,27 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] name = "enum-map" -version = "2.6.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "017b207acb4cc917f4c31758ed95c0bc63ddb0f358b22eb38f80a2b2a43f6b1f" +checksum = "9705d8de4776df900a4a0b2384f8b0ab42f775e93b083b42f8ce71bdc32a47e3" dependencies = [ "enum-map-derive", ] [[package]] name = "enum-map-derive" -version = "0.12.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8560b409800a72d2d7860f8e5f4e0b0bd22bea6a352ea2a9ce30ccdef7f16d2f" +checksum = "ccb14d927583dd5c2eac0f2cf264fc4762aefe1ae14c47a8a20fc1939d3a5fc0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -1608,18 +1597,18 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "erased-serde" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da96524cc884f6558f1769b6c46686af2fe8e8b4cd253bd5a3cdba8181b8e070" +checksum = "fc978899517288e3ebbd1a3bfc1d9537dbb87eeab149e53ea490e63bcdff561a" dependencies = [ "serde", ] [[package]] name = "errno" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" dependencies = [ "errno-dragonfly", "libc", @@ -1655,9 +1644,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "fallible-iterator" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" [[package]] name = "fallible-streaming-iterator" @@ -1674,15 +1663,21 @@ dependencies = [ "instant", ] +[[package]] +name = "fastrand" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" + [[package]] name = "filetime" -version = "0.2.21" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153" +checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.2.16", + "redox_syscall 0.3.5", "windows-sys 0.48.0", ] @@ -1704,9 +1699,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" dependencies = [ "crc32fast", "miniz_oxide", @@ -1802,7 +1797,7 @@ version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" dependencies = [ - "fastrand", + "fastrand 1.9.0", "futures-core", "futures-io", "memchr", @@ -1819,7 +1814,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -1867,6 +1862,19 @@ dependencies = [ "byteorder", ] +[[package]] +name = "generator" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc16584ff22b460a382b7feec54b23d2908d858152e5739a120b949293bd74e" +dependencies = [ + "cc", + "libc", + "log", + "rustversion", + "windows", +] + [[package]] name = "generic-array" version = "0.14.7" @@ -1901,9 +1909,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.27.3" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" [[package]] name = "glob" @@ -1913,9 +1921,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.11" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1391ab1f92ffcc08911957149833e682aa3fe252b9f45f966d2ef972274c97df" +checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d" dependencies = [ "aho-corasick", "bstr 1.6.0", @@ -1949,9 +1957,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ "bytes", "fnv", @@ -2004,20 +2012,15 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.13.2" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +checksum = 
"2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" dependencies = [ "ahash 0.8.3", + "allocator-api2", "serde", ] -[[package]] -name = "hashbrown" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" - [[package]] name = "heapless" version = "0.7.16" @@ -2047,15 +2050,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.2" @@ -2098,9 +2092,9 @@ checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "humansize" @@ -2134,7 +2128,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.4.9", "tokio", "tower-service", "tracing", @@ -2297,9 +2291,9 @@ checksum = "8bb03732005da905c88227371639bf1ad885cc712789c011c31c5fb3ab3ccf02" [[package]] name = "inventory" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25b1d6b4b9fb75fc419bdef998b689df5080a32931cb3395b86202046b56a9ea" +checksum = "a53088c87cf71c9d4f3372a2cb9eea1e7b8a0b1bf8b7f7d23fe5b76dbb07e63b" [[package]] name = "io-lifetimes" @@ -2307,7 +2301,7 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi", "libc", "windows-sys 0.48.0", ] @@ -2324,8 +2318,8 @@ version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ - "hermit-abi 0.3.2", - "rustix 0.38.4", + "hermit-abi", + "rustix 0.38.8", "windows-sys 0.48.0", ] @@ -2338,6 +2332,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.9" @@ -2392,20 +2395,21 @@ dependencies = [ [[package]] name = "lalrpop" -version = "0.19.12" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" +checksum = "da4081d44f4611b66c6dd725e6de3169f9f63905421e8626fcb86b6a898998b8" dependencies = [ "ascii-canvas", "bit-set", "diff", "ena", "is-terminal", - "itertools", + "itertools 0.10.5", "lalrpop-util", "petgraph", + "pico-args", "regex", - "regex-syntax 0.6.29", + "regex-syntax 0.7.4", "string_cache", "term", "tiny-keccak", @@ -2414,9 +2418,9 @@ dependencies = [ [[package]] name = "lalrpop-util" -version = "0.19.12" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" +checksum = "3f35c735096c0293d313e8f2a641627472b83d01b937177fe76e5e2708d31e0d" dependencies = [ "regex", ] @@ -2541,9 +2545,9 @@ checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" [[package]] name = "librocksdb-sys" -version = "0.8.3+7.4.4" +version = "0.11.0+8.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "557b255ff04123fcc176162f56ed0c9cd42d8f357cf55b3fabeb60f7413741b3" +checksum = "d3386f101bcb4bd252d8e9d2fb41ec3b0862a15a62b478c355b2982efa469e3e" dependencies = [ "bindgen", "bzip2-sys", @@ -2551,13 +2555,14 @@ dependencies = [ "glob", "libc", "libz-sys", + "lz4-sys", ] [[package]] name = "libz-sys" -version = "1.1.9" +version = "1.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ee889ecc9568871456d42f603d6a0ce59ff328d291063a45cbdf0036baf6db" +checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b" dependencies = [ "cc", "pkg-config", @@ -2578,9 +2583,9 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" [[package]] name = "linux-raw-sys" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" [[package]] name = "lock_api" @@ -2594,9 +2599,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.19" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" dependencies = [ "value-bag", ] @@ -2624,6 +2629,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "loom" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce9394216e2be01e607cf9e9e2b64c387506df1e768b14cbd2854a3650c3c03e" +dependencies = [ + "cfg-if", + "generator", + "scoped-tls", + "tracing", + "tracing-subscriber", +] + [[package]] name = "lz4" version = "1.24.0" @@ -2655,9 +2673,9 @@ dependencies = [ [[package]] name = "matchit" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" +checksum = "ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef" [[package]] name = "memchr" @@ -2753,7 +2771,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05180d69e3da0e530ba2a1dae5110317e49e3b7f3d41be227dc5f92e49ee7af" dependencies = [ - "num-bigint 0.4.3", + "num-bigint 0.4.4", "num-complex", "num-integer", "num-iter", @@ -2774,9 +2792,9 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" dependencies = [ "autocfg", "num-integer", @@ -2785,9 +2803,9 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +checksum = 
"1ba157ca0885411de85d6ca030ba7e2a83a28636056c7c699b07c8b6f7383214" dependencies = [ "num-traits", ] @@ -2820,16 +2838,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" dependencies = [ "autocfg", - "num-bigint 0.4.3", + "num-bigint 0.4.4", "num-integer", "num-traits", ] [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", "libm", @@ -2841,15 +2859,15 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi", "libc", ] [[package]] name = "object" -version = "0.31.1" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" +checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" dependencies = [ "memchr", ] @@ -2867,7 +2885,7 @@ dependencies = [ "futures", "humantime", "hyper", - "itertools", + "itertools 0.10.5", "parking_lot 0.12.1", "percent-encoding", "quick-xml", @@ -2898,9 +2916,9 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] name = "openssl" -version = "0.10.55" +version = "0.10.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" +checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e" dependencies = [ "bitflags 1.3.2", "cfg-if", @@ -2919,7 +2937,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -2928,23 +2946,33 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +[[package]] +name = "openssl-src" +version = "111.27.0+1.1.1v" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06e8f197c82d7511c5b014030c9b1efeda40d7d5f99d23b4ceed3524a5e63f02" +dependencies = [ + "cc", +] + [[package]] name = "openssl-sys" -version = "0.9.90" +version = "0.9.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" +checksum = "866b5f16f90776b9bb8dc1e1802ac6f0513de3a7a7465867bfbc563dc737faac" dependencies = [ "cc", "libc", + "openssl-src", "pkg-config", "vcpkg", ] [[package]] name = "opentelemetry" -version = "0.18.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69d6c3d7288a106c0a363e4b0e8d308058d56902adefb16f4936f417ffef086e" +checksum = "5f4b8347cc26099d3aeee044065ecc3ae11469796b4d65d065a23a584ed92a6f" dependencies = [ "opentelemetry_api", "opentelemetry_sdk", @@ -2952,9 +2980,9 @@ dependencies = [ [[package]] name = "opentelemetry-otlp" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1c928609d087790fc936a1067bdc310ae702bdf3b090c3f281b713622c8bbde" +checksum = "8af72d59a4484654ea8eb183fea5ae4eb6a41d7ac3e3bae5f4d2a282a3a7d3ca" 
dependencies = [ "async-trait", "futures", @@ -2965,44 +2993,43 @@ dependencies = [ "prost", "thiserror", "tokio", - "tonic", + "tonic 0.8.3", ] [[package]] name = "opentelemetry-proto" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61a2f56df5574508dd86aaca016c917489e589ece4141df1b5e349af8d66c28" +checksum = "045f8eea8c0fa19f7d48e7bc3128a39c2e5c533d5c61298c548dfefc1064474c" dependencies = [ "futures", "futures-util", "opentelemetry", "prost", - "tonic", - "tonic-build", + "tonic 0.8.3", ] [[package]] name = "opentelemetry_api" -version = "0.18.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c24f96e21e7acc813c7a8394ee94978929db2bcc46cf6b5014fc612bf7760c22" +checksum = "ed41783a5bf567688eb38372f2b7a8530f5a607a4b49d38dd7573236c23ca7e2" dependencies = [ "fnv", "futures-channel", "futures-util", "indexmap 1.9.3", - "js-sys", "once_cell", "pin-project-lite", "thiserror", + "urlencoding", ] [[package]] name = "opentelemetry_sdk" -version = "0.18.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ca41c4933371b61c2a2f214bf16931499af4ec90543604ec828f7a625c09113" +checksum = "8b3a2a91fdbfdd4d212c0dcc2ab540de2c2bcbbd90be17de7a7daf8822d010c1" dependencies = [ "async-trait", "crossbeam-channel", @@ -3029,12 +3056,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "os_str_bytes" -version = "6.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d5d9eb14b174ee9aa2ef96dc2b94637a2d4b6e7cb873c7e171f0c20c6cf3eac" - [[package]] name = "overload" version = "0.1.1" @@ -3101,7 +3122,7 @@ dependencies = [ "libc", "redox_syscall 0.3.5", "smallvec", - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -3127,7 +3148,7 @@ dependencies = [ "hashbrown 0.14.0", "lz4", "num", - "num-bigint 0.4.3", + "num-bigint 0.4.4", "paste", "seq-macro", "snap", @@ -3160,7 +3181,7 @@ dependencies = [ "regex", "regex-syntax 0.7.4", "structmeta", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -3201,9 +3222,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.7.1" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d2d1d55045829d65aad9d389139882ad623b33b904e7c9f1b10c5b8927298e5" +checksum = "1acb4a4365a13f749a93f1a094a7805e5cfa0955373a9de860d962eaa3a5fe5a" dependencies = [ "thiserror", "ucd-trie", @@ -3211,9 +3232,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.1" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f94bca7e7a599d89dea5dfa309e217e7906c3c007fb9c3299c40b10d6a315d3" +checksum = "666d00490d4ac815001da55838c500eafb0320019bbaa44444137c48b443a853" dependencies = [ "pest", "pest_generator", @@ -3221,22 +3242,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.1" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d490fe7e8556575ff6911e45567ab95e71617f43781e5c05490dc8d75c965c" +checksum = "68ca01446f50dbda87c1786af8770d535423fa8a53aec03b8f4e3d7eb10e0929" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] name = "pest_meta" -version = "2.7.1" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2674c66ebb4b4d9036012091b537aae5878970d6999f81a265034d85b136b341" +checksum = "56af0a30af74d0445c0bf6d9d051c979b516a1a5af790d251daee76005420a48" dependencies = [ "once_cell", "pest", @@ -3245,12 +3266,12 @@ dependencies = [ [[package]] name = "petgraph" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 1.9.3", + "indexmap 2.0.0", ] [[package]] @@ -3292,31 +3313,37 @@ dependencies = [ "uncased", ] +[[package]] +name = "pico-args" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + [[package]] name = "pin-project" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030ad2bc4db10a8944cb0d837f158bdfec4d4a4873ab701a95046770d11f8842" +checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" +checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] name = "pin-project-lite" -version = "0.2.10" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" +checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" [[package]] name = "pin-utils" @@ -3348,9 +3375,9 @@ dependencies = [ [[package]] name = "postcard" -version = "1.0.4" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfa512cd0d087cc9f99ad30a1bf64795b67871edbead083ffc3a4dfafa59aa00" +checksum = "c9ee729232311d3cd113749948b689627618133b1c5012b77342c1950b25eaeb" dependencies = [ "cobs", "heapless", @@ -3377,7 +3404,7 @@ checksum = "09963355b9f467184c04017ced4a2ba2d75cbcb4e7462690d388233253d4b1a9" dependencies = [ "anstyle", "difflib", - "itertools", + "itertools 0.10.5", "predicates-core", ] @@ -3407,6 +3434,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "prettyplease" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c64d9ba0963cdcea2e1b2230fbae2bab30eb25a174be395c41e764bfb65dd62" +dependencies = [ + "proc-macro2", + "syn 2.0.29", +] + [[package]] name = "prettytable-rs" version = "0.10.0" @@ -3474,12 +3511,12 @@ checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ "bytes", "heck 0.4.1", - "itertools", + "itertools 0.10.5", "lazy_static", "log", "multimap", "petgraph", - "prettyplease", + "prettyplease 0.1.25", "prost", "prost-types", "regex", @@ -3495,7 +3532,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", - "itertools", + "itertools 0.10.5", "proc-macro2", "quote", "syn 1.0.109", @@ -3614,9 +3651,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.31" +version = "1.0.33" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ "proc-macro2", ] @@ -3744,13 +3781,13 @@ checksum = "f4ed1d73fb92eba9b841ba2aef69533a060ccc0d3ec71c90aeda5996d4afb7a9" [[package]] name = "regex" -version = "1.9.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.3", + "regex-automata 0.3.6", "regex-syntax 0.7.4", ] @@ -3765,9 +3802,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.3" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" dependencies = [ "aho-corasick", "memchr", @@ -3788,9 +3825,9 @@ checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" [[package]] name = "reqwest" -version = "0.11.18" +version = "0.11.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55" +checksum = "20b9b67e2ca7dd9e9f9285b759de30ff538aab981abaaf7bc9bd90b84a0126c3" dependencies = [ "base64 0.21.2", "bytes", @@ -3853,9 +3890,9 @@ checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" [[package]] name = "rocksdb" -version = "0.19.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e9562ea1d70c0cc63a34a22d977753b50cca91cc6b6527750463bd5dd8697bc" +checksum = "bb6f170a4041d50a0ce04b0d2e14916d6ca863ea2e422689a5b694395d299ffe" dependencies = [ "libc", "librocksdb-sys", @@ -3909,22 +3946,22 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.4" +version = "0.38.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" +checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.0", "errno", "libc", - "linux-raw-sys 0.4.3", + "linux-raw-sys 0.4.5", "windows-sys 0.48.0", ] [[package]] name = "rustls" -version = "0.21.5" +version = "0.21.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" +checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" dependencies = [ "log", "ring", @@ -3943,9 +3980,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.101.1" +version = "0.101.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15f36a6828982f422756984e47912a7a51dcbc2a197aa791158f8ca61cd8204e" +checksum = "7d93931baf2d282fff8d3a532bbfd7653f734643161b87e3e01e59a04439bf0d" dependencies = [ "ring", "untrusted", @@ -4008,6 +4045,12 @@ dependencies = [ "serde_json", ] +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + [[package]] name = "scopeguard" version = "1.2.0" @@ -4026,9 +4069,9 @@ dependencies = [ [[package]] name = 
"security-framework" -version = "2.9.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ "bitflags 1.3.2", "core-foundation", @@ -4039,9 +4082,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51d0c0d83bec45f16480d0ce0058397a69e48fcdc52d1dc8855fb68acbd31a7" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" dependencies = [ "core-foundation-sys", "libc", @@ -4061,35 +4104,44 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4" [[package]] name = "serde" -version = "1.0.171" +version = "1.0.185" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" +checksum = "be9b6f69f1dfd54c3b568ffa45c310d6973a5e5148fd40cf515acaf38cf5bc31" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.171" +version = "1.0.185" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" +checksum = "dc59dfdcbad1437773485e0367fea4b090a2e0a16d9ffc46af47764536a298ec" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] name = "serde_json" -version = "1.0.103" +version = "1.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" +checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360" dependencies = [ "itoa", "ryu", "serde", ] +[[package]] +name = "serde_spanned" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" +dependencies = [ + "serde", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -4104,9 +4156,9 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.24" +version = "0.9.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd5f51e3fdb5b9cdd1577e1cb7a733474191b1aca6a72c2e50913241632c1180" +checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" dependencies = [ "indexmap 2.0.0", "itoa", @@ -4172,9 +4224,9 @@ dependencies = [ [[package]] name = "similar-asserts" -version = "1.4.2" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbf644ad016b75129f01a34a355dcb8d66a5bc803e417c7a77cc5d5ee9fa0f18" +checksum = "e041bb827d1bfca18f213411d51b665309f1afb37a04a5d1464530e13779fc0f" dependencies = [ "console", "similar", @@ -4182,15 +4234,15 @@ dependencies = [ [[package]] name = "siphasher" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "slab" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ 
"autocfg", ] @@ -4251,19 +4303,29 @@ dependencies = [ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "sparrow-api" -version = "0.9.0" +version = "0.11.0" dependencies = [ "anyhow", "arrow", "chrono", - "clap 4.3.15", + "clap", "decorum", "derive_more", "enum-map", "error-stack", - "itertools", + "itertools 0.11.0", "prost", "prost-build", "prost-types", @@ -4277,14 +4339,14 @@ dependencies = [ "tempfile", "thiserror", "tokio", - "tonic", + "tonic 0.9.2", "tonic-build", "uuid 1.4.1", ] [[package]] name = "sparrow-arrow" -version = "0.9.0" +version = "0.11.0" dependencies = [ "ahash 0.8.3", "anyhow", @@ -4302,7 +4364,7 @@ dependencies = [ "error-stack", "half 2.3.1", "insta", - "itertools", + "itertools 0.11.0", "num", "proptest", "serde", @@ -4312,7 +4374,7 @@ dependencies = [ [[package]] name = "sparrow-backend" -version = "0.9.0" +version = "0.11.0" dependencies = [ "arrow-schema", "index_vec", @@ -4322,16 +4384,16 @@ dependencies = [ [[package]] name = "sparrow-catalog" -version = "0.9.0" +version = "0.11.0" dependencies = [ "arrow", - "clap 4.3.15", + "clap", "derive_more", "error-stack", "fallible-iterator", "futures", - "hashbrown 0.13.2", - "itertools", + "hashbrown 0.14.0", + "itertools 0.11.0", "logos", "parquet", "prettytable-rs", @@ -4358,14 +4420,15 @@ dependencies = [ [[package]] name = "sparrow-compiler" -version = "0.9.0" +version = "0.11.0" dependencies = [ "ahash 0.8.3", "anyhow", "arrow", + "arrow-schema", "bit-set", "chrono", - "clap 4.3.15", + "clap", "codespan-reporting", "const_format", "decorum", @@ -4374,9 +4437,9 @@ dependencies = [ "egg", "enum-map", "error-stack", - "hashbrown 0.13.2", + "hashbrown 0.14.0", "insta", - "itertools", + "itertools 0.11.0", "lalrpop", "lalrpop-util", "logos", @@ -4395,42 +4458,60 @@ dependencies = [ "sparrow-core", "sparrow-instructions", "sparrow-kernels", - "sparrow-plan", + "sparrow-merge", "sparrow-syntax", "static_init", - "strum 0.24.1", - "strum_macros 0.24.3", + "strum 0.25.0", + "strum_macros 0.25.2", "termcolor", "thiserror", "tokio", - "tonic", + "tonic 0.9.2", "tracing", "uuid 1.4.1", ] [[package]] name = "sparrow-core" -version = "0.9.0" +version = "0.11.0" dependencies = [ "anyhow", "arrow", "chrono", "decorum", "futures", - "itertools", + "itertools 0.11.0", "num", "owning_ref", "parquet", "serde", "sparrow-arrow", "static_init", - "tonic", + "tonic 0.9.2", "tracing", ] +[[package]] +name = "sparrow-execution" +version = "0.11.0" +dependencies = [ + "arrow-array", + "arrow-schema", + "derive_more", + "error-stack", + "index_vec", + "parking_lot 0.12.1", + "sparrow-arrow", + "sparrow-physical", + "sparrow-scheduler", + "sparrow-testing", + "sparrow-transforms", + "tokio", +] + [[package]] name = "sparrow-expressions" -version = "0.9.0" +version = "0.11.0" dependencies = [ "approx 0.5.1", "arrow-arith", @@ -4444,10 +4525,10 @@ dependencies = [ "arrow-string", "derive_more", "error-stack", - "hashbrown 0.13.2", + "hashbrown 0.14.0", "index_vec", "inventory", - "itertools", + "itertools 0.11.0", "num", "serde_json", "sparrow-arrow", @@ -4458,7 +4539,7 @@ dependencies = [ [[package]] name = "sparrow-instructions" -version = "0.9.0" +version = "0.11.0" dependencies = [ "anyhow", "approx 0.5.1", @@ -4470,13 +4551,15 @@ dependencies = [ "chrono", "criterion", "derive_more", + "enum-map", 
"erased-serde", "error-stack", - "hashbrown 0.13.2", - "itertools", + "hashbrown 0.14.0", + "itertools 0.11.0", "lz4-sys", "num", "owning_ref", + "parse-display", "prost", "prost-wkt-types", "rand 0.8.5", @@ -4487,24 +4570,26 @@ dependencies = [ "sparrow-api", "sparrow-arrow", "sparrow-kernels", - "sparrow-plan", "sparrow-syntax", "static_init", + "strum 0.25.0", + "strum_macros 0.25.2", "tempfile", - "tonic", + "tonic 0.9.2", "tracing", + "uuid 1.4.1", ] [[package]] name = "sparrow-kernels" -version = "0.9.0" +version = "0.11.0" dependencies = [ "anyhow", "arrow", "bitvec", "chrono", "chronoutil", - "itertools", + "itertools 0.11.0", "num", "proptest", "smallvec", @@ -4515,7 +4600,7 @@ dependencies = [ [[package]] name = "sparrow-main" -version = "0.9.0" +version = "0.11.0" dependencies = [ "ahash 0.8.3", "anyhow", @@ -4523,7 +4608,7 @@ dependencies = [ "assert_cmd", "async-stream", "chrono", - "clap 4.3.15", + "clap", "dashmap", "data-encoding", "derive_more", @@ -4531,11 +4616,11 @@ dependencies = [ "fallible-iterator", "filetime", "futures", - "hashbrown 0.13.2", + "hashbrown 0.14.0", "hex", "indoc", "insta", - "itertools", + "itertools 0.11.0", "opentelemetry", "opentelemetry-otlp", "parquet", @@ -4552,7 +4637,6 @@ dependencies = [ "sparrow-instructions", "sparrow-kernels", "sparrow-materialize", - "sparrow-plan", "sparrow-qfr", "sparrow-runtime", "sparrow-syntax", @@ -4561,7 +4645,7 @@ dependencies = [ "tokio", "tokio-stream", "tokio-util", - "tonic", + "tonic 0.9.2", "tonic-health", "tonic-reflection", "tracing", @@ -4574,7 +4658,7 @@ dependencies = [ [[package]] name = "sparrow-materialize" -version = "0.9.0" +version = "0.11.0" dependencies = [ "dashmap", "derive_more", @@ -4588,9 +4672,34 @@ dependencies = [ "tracing", ] +[[package]] +name = "sparrow-merge" +version = "0.11.0" +dependencies = [ + "anyhow", + "arrow-arith", + "arrow-array", + "arrow-csv", + "arrow-ord", + "arrow-schema", + "arrow-select", + "async-stream", + "bit-set", + "derive_more", + "error-stack", + "futures", + "itertools 0.11.0", + "proptest", + "smallvec", + "sparrow-arrow", + "sparrow-core", + "tokio", + "tracing", +] + [[package]] name = "sparrow-physical" -version = "0.9.0" +version = "0.11.0" dependencies = [ "arrow-schema", "bigdecimal", @@ -4600,42 +4709,22 @@ dependencies = [ "serde", "serde_yaml", "sparrow-arrow", -] - -[[package]] -name = "sparrow-plan" -version = "0.9.0" -dependencies = [ - "anyhow", - "arrow", - "enum-map", - "hashbrown 0.13.2", - "itertools", - "parse-display", - "serde_yaml", - "sparrow-api", - "sparrow-arrow", - "sparrow-syntax", - "static_init", - "strum 0.24.1", - "strum_macros 0.24.3", - "tracing", - "uuid 1.4.1", + "strum_macros 0.25.2", ] [[package]] name = "sparrow-qfr" -version = "0.9.0" +version = "0.11.0" dependencies = [ "cpu-time", "derive_more", "error-stack", "fallible-iterator", "futures", - "hashbrown 0.13.2", + "hashbrown 0.14.0", "insta", "inventory", - "itertools", + "itertools 0.11.0", "once_cell", "pin-project", "prost", @@ -4651,23 +4740,22 @@ dependencies = [ [[package]] name = "sparrow-qfr-tool" -version = "0.9.0" +version = "0.11.0" dependencies = [ - "clap 4.3.15", + "clap", "cpu-time", "derive_more", "error-stack", "fallible-iterator", - "hashbrown 0.13.2", + "hashbrown 0.14.0", "insta", - "itertools", + "itertools 0.11.0", "serde", "serde_json", "serde_yaml", "smallvec", "sparrow-api", "sparrow-compiler", - "sparrow-plan", "sparrow-qfr", "sparrow-syntax", "tempfile", @@ -4676,11 +4764,13 @@ dependencies = [ [[package]] name = "sparrow-runtime" 
-version = "0.9.0" +version = "0.11.0" dependencies = [ "ahash 0.8.3", "anyhow", "arrow", + "arrow-array", + "arrow-select", "async-once-cell", "async-stream", "async-trait", @@ -4690,7 +4780,7 @@ dependencies = [ "bitvec", "bytes", "chrono", - "clap 4.3.15", + "clap", "criterion", "dashmap", "data-encoding", @@ -4702,10 +4792,10 @@ dependencies = [ "futures", "futures-lite", "half 2.3.1", - "hashbrown 0.13.2", + "hashbrown 0.14.0", "insta", "inventory", - "itertools", + "itertools 0.11.0", "kafka", "lz4", "num-traits", @@ -4731,7 +4821,7 @@ dependencies = [ "sparrow-core", "sparrow-instructions", "sparrow-kernels", - "sparrow-plan", + "sparrow-merge", "sparrow-qfr", "sparrow-syntax", "sparrow-testing", @@ -4740,15 +4830,55 @@ dependencies = [ "tokio", "tokio-stream", "tokio-util", - "tonic", + "tonic 0.9.2", "tracing", "url", "uuid 1.4.1", ] +[[package]] +name = "sparrow-scheduler" +version = "0.11.0" +dependencies = [ + "core_affinity", + "derive_more", + "error-stack", + "index_vec", + "itertools 0.11.0", + "loom", + "serde", + "sparrow-arrow", + "tracing", + "work-queue", +] + +[[package]] +name = "sparrow-session" +version = "0.11.0" +dependencies = [ + "arrow-array", + "arrow-schema", + "arrow-select", + "derive_more", + "error-stack", + "futures", + "itertools 0.11.0", + "smallvec", + "sparrow-api", + "sparrow-compiler", + "sparrow-instructions", + "sparrow-merge", + "sparrow-runtime", + "sparrow-syntax", + "static_init", + "tokio", + "tokio-stream", + "uuid 1.4.1", +] + [[package]] name = "sparrow-syntax" -version = "0.9.0" +version = "0.11.0" dependencies = [ "anyhow", "approx 0.5.1", @@ -4758,9 +4888,9 @@ dependencies = [ "bitvec", "codespan-reporting", "decorum", - "hashbrown 0.13.2", + "hashbrown 0.14.0", "insta", - "itertools", + "itertools 0.11.0", "lalrpop", "lalrpop-util", "logos", @@ -4774,7 +4904,7 @@ dependencies = [ [[package]] name = "sparrow-testing" -version = "0.9.0" +version = "0.11.0" dependencies = [ "arrow-array", "arrow-csv", @@ -4783,7 +4913,7 @@ dependencies = [ "arrow-select", "derive_more", "error-stack", - "itertools", + "itertools 0.11.0", "parquet", "serde", "serde_json", @@ -4793,6 +4923,23 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "sparrow-transforms" +version = "0.11.0" +dependencies = [ + "arrow-array", + "arrow-schema", + "derive_more", + "error-stack", + "itertools 0.11.0", + "parking_lot 0.12.1", + "sparrow-arrow", + "sparrow-expressions", + "sparrow-physical", + "sparrow-scheduler", + "tracing", +] + [[package]] name = "spin" version = "0.5.2" @@ -4876,7 +5023,7 @@ dependencies = [ "proc-macro2", "quote", "structmeta-derive", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -4887,7 +5034,7 @@ checksum = "a60bcaff7397072dca0017d1db428e30d5002e00b6847703e2e42005c95fbe00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -4902,6 +5049,12 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" + [[package]] name = "strum_macros" version = "0.18.0" @@ -4927,6 +5080,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.29", +] + [[package]] name = "substring" version = "1.4.5" @@ -4965,9 +5131,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.26" +version = "2.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" dependencies = [ "proc-macro2", "quote", @@ -5000,15 +5166,14 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.6.0" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ - "autocfg", "cfg-if", - "fastrand", + "fastrand 2.0.0", "redox_syscall 0.3.5", - "rustix 0.37.23", + "rustix 0.38.8", "windows-sys 0.48.0", ] @@ -5061,30 +5226,24 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" -[[package]] -name = "textwrap" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" - [[package]] name = "thiserror" -version = "1.0.43" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" +checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.43" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" +checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -5154,18 +5313,17 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.29.1" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" dependencies = [ - "autocfg", "backtrace", "bytes", "libc", "mio", "num_cpus", "pin-project-lite", - "socket2", + "socket2 0.5.3", "tokio-macros", "windows-sys 0.48.0", ] @@ -5188,7 +5346,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -5238,11 +5396,36 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.11" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542" dependencies = [ "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.19.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" +dependencies = [ + "indexmap 2.0.0", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", ] [[package]] @@ -5277,13 +5460,41 @@ dependencies = [ "tracing-futures", ] +[[package]] +name = "tonic" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" +dependencies = [ + "async-trait", + "axum", + "base64 0.21.2", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tonic-build" -version = "0.8.4" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf5e9b9c0f7e0a7c027dcfaba7b2c60816c7049171f679d99ee2ff65d0de8c4" +checksum = "a6fdaae4c2c638bb70fe42803a26fbd6fc6ac8c72f5c59f67ecc2a2dcabf4b07" dependencies = [ - "prettyplease", + "prettyplease 0.1.25", "proc-macro2", "prost-build", "quote", @@ -5292,30 +5503,28 @@ dependencies = [ [[package]] name = "tonic-health" -version = "0.8.0" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a88aee666ef3a4d1ee46218bbc8e5f69bcf9cc27bf2e871d6b724d83f56d179f" +checksum = "080964d45894b90273d2b1dd755fdd114560db8636bb41cea615213c45043c4d" dependencies = [ "async-stream", - "bytes", "prost", "tokio", "tokio-stream", - "tonic", + "tonic 0.9.2", ] [[package]] name = "tonic-reflection" -version = "0.6.0" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67494bad4dda4c9bffae901dfe14e2b2c0f760adb4706dc10beeb81799f7f7b2" +checksum = "0543d7092032041fbeac1f2c84304537553421a11a623c2301b12ef0264862c7" dependencies = [ - "bytes", "prost", "prost-types", "tokio", "tokio-stream", - "tonic", + "tonic 0.9.2", ] [[package]] @@ -5370,7 +5579,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -5416,9 +5625,9 @@ dependencies = [ [[package]] name = "tracing-opentelemetry" -version = "0.18.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21ebb87a95ea13271332df069020513ab70bdb5637ca42d6e492dc3bbbad48de" +checksum = "00a39dcf9bfc1742fa4d6215253b33a6e474be78275884c216fc2a06267b3600" dependencies = [ "once_cell", "opentelemetry", @@ -5506,9 +5715,9 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" [[package]] name = "typetag" -version = "0.2.10" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66aafcfb982bf1f9a28755ac6bcbdcd4631ff516cb038fa61299201ebb4364" +checksum = "80960fd143d4c96275c0e60b08f14b81fbb468e79bc0ef8fbda69fb0afafae43" dependencies = [ "erased-serde", "inventory", @@ -5519,13 +5728,13 @@ dependencies = [ [[package]] name = "typetag-impl" -version = "0.2.10" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d836cd032f71d90cbaa3c1f85ce84266af23659766d8c0b1c4c6524a0fb4c36f" +checksum = 
"bfc13d450dc4a695200da3074dacf43d449b968baee95e341920e47f61a3b40f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -5662,6 +5871,12 @@ dependencies = [ "serde", ] +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + [[package]] name = "utf8parse" version = "0.2.1" @@ -5786,7 +6001,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", "wasm-bindgen-shared", ] @@ -5820,7 +6035,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5833,9 +6048,9 @@ checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-streams" -version = "0.2.3" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" +checksum = "b4609d447824375f43e1ffbc051b50ad8f4b3ae8219680c94452ea05eb240ac7" dependencies = [ "futures-util", "js-sys", @@ -5854,24 +6069,11 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "webpki" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "webpki-roots" -version = "0.22.6" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" -dependencies = [ - "webpki", -] +checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" [[package]] name = "which" @@ -5921,7 +6123,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -5939,7 +6141,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -5959,17 +6161,17 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.48.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", ] [[package]] @@ -5980,9 +6182,9 @@ checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_msvc" @@ -5992,9 +6194,9 @@ checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_i686_gnu" @@ -6004,9 +6206,9 @@ checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_msvc" @@ -6016,9 +6218,9 @@ checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_x86_64_gnu" @@ -6028,9 +6230,9 @@ checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnullvm" @@ -6040,9 +6242,9 @@ checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_msvc" @@ -6052,17 +6254,37 @@ checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "winnow" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d09770118a7eb1ccaf4a594a221334119a44a814fcb0d31c5b85e83e97227a97" +dependencies = [ + "memchr", +] [[package]] name = "winreg" -version = "0.10.1" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "winapi", + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "work-queue" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f1c8d5eab11a669da42347cdd5244563592edabfe06c7778553185ffeedb623d" +dependencies = [ + "concurrent-queue", + "loom", ] [[package]] @@ -6095,12 +6317,12 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "332f188cc1bcf1fe1064b8c58d150f497e697f49774aa846f2dc949d9a25f236" +checksum = "f3b9c234616391070b0b173963ebc65a9195068e7ed3731c6edac2ec45ebe106" dependencies = [ "byteorder", - "zerocopy-derive 0.3.2", + "zerocopy-derive 0.6.3", ] [[package]] @@ -6116,29 +6338,29 @@ dependencies = [ [[package]] name = "zerocopy-derive" -version = "0.3.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6505e6815af7de1746a08f69c69606bb45695a17149517680f3b2149713b19a3" +checksum = "8f7f3a471f98d0a61c34322fbbfd10c384b07687f680d4119813713f72308d91" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.29", ] [[package]] name = "zstd" -version = "0.12.3+zstd.1.5.2" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806" +checksum = "1a27595e173641171fc74a1232b7b1c7a7cb6e18222c11e9dfb9888fa424c53c" dependencies = [ "zstd-safe", ] [[package]] name = "zstd-safe" -version = "6.0.5+zstd.1.5.4" +version = "6.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d56d9e60b4b1758206c238a10165fbcae3ca37b01744e394c463463f6529d23b" +checksum = "ee98ffd0b48ee95e6c5168188e44a54550b1564d9d530ee21d5f0eaed1069581" dependencies = [ "libc", "zstd-sys", diff --git a/Cargo.toml b/Cargo.toml index e724d58ef..0949ef50b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,10 +4,13 @@ members = ["crates/*"] [workspace.package] authors = ["Kaskada Developers"] edition = "2021" -version = "0.9.0" +version = "0.11.0" license = "Apache-2.0" [workspace.dependencies] + +logos = "0.12.1" +serde = { version = "1.0.159", features = ["derive", "rc"] } ahash = "0.8.3" anyhow = { version = "1.0.70", features = ["backtrace"] } approx = "0.5.1" @@ -17,18 +20,18 @@ arrow-array = { version = "43.0.0" } arrow-buffer = { version = "43.0.0" } arrow-cast = { version = "43.0.0" } arrow-csv = { version = "43.0.0" } -arrow-data = { version ="43.0.0" } +arrow-data = { version = "43.0.0" } arrow-json = { version = "43.0.0" } arrow-ord = { version = "43.0.0" } arrow-schema = { version = "43.0.0", features = ["serde"] } arrow-select = { version = "43.0.0" } arrow-string = { version = "43.0.0" } -async-once-cell = "0.3.1" +async-once-cell = "0.5.3" async-stream = "0.3.4" async-trait = "0.1.68" avro-rs = "0.13.0" avro-schema = "0.3.0" -bigdecimal = { version = "0.3.1", features = ["serde"] } +bigdecimal = { version = "0.4.1", features = ["serde"] } bincode = "1.3.3" bit-set = "0.5.3" bitvec = { version = "1.0.1", features = ["serde"] } @@ -38,8 +41,9 @@ chronoutil = "0.2.3" clap = { version = "4.2.0", features = ["derive", "env"] } codespan-reporting = "0.11.1" const_format = "0.2.30" +core_affinity = "0.8.0" cpu-time = "1.0.0" -criterion = { version = "0.4.0", default-features = false, features = [ +criterion = { version = "0.5.1", default-features = false, features = [ "async_tokio", ] } dashmap = "5.4.0" @@ -52,30 +56,30 @@ enum-as-inner = "0.6.0" enum-map = "2.5.0" erased-serde = "0.3.25" error-stack = { version = "0.3.1", features = ["anyhow", "spantrace"] } -fallible-iterator = "0.2.0" +fallible-iterator = "0.3.0" futures = "0.3.27" futures-lite = "1.12.0" 
half = { version = "2.2.1", features = ["serde"] } -hashbrown = { version = "0.13.2", features = ["serde"] } +hashbrown = { version = "0.14.0", features = ["serde"] } hex = "0.4.3" index_vec = { version = "0.1.3", features = ["serde"] } indoc = "1.0.9" insta = { version = "1.29.0", features = ["ron", "yaml", "json"] } inventory = "0.3.8" -itertools = "0.10.5" kafka = "0.9.0" -lalrpop = "0.19.9" -lalrpop-util = "0.19.9" -logos = "0.12.1" +itertools = "0.11.0" +lalrpop = "0.20.0" +lalrpop-util = "0.20.0" lz4 = "1.24.0" lz4-sys = "1.9.4" num = "0.4.0" num-traits = "0.2.15" object_store = { version = "0.6.1", features = ["aws", "gcp"] } once_cell = "1.17.1" -opentelemetry = { version = "0.18.0", features = ["rt-tokio"] } -opentelemetry-otlp = "0.11.0" +opentelemetry = { version = "0.19.0", features = ["rt-tokio"] } +opentelemetry-otlp = "0.12.0" owning_ref = "0.4.1" +parking_lot = { version = "0.12.1" } parquet = { version = "43.0.0", features = ["async"] } parse-display = "0.8.0" pin-project = "1.0.12" @@ -88,11 +92,14 @@ prost-types = "0.11.8" prost-wkt = "0.4.1" prost-wkt-build = "0.4.1" prost-wkt-types = "0.4.1" -pulsar = { version = "5.1.0", default-features = false, features = ["async-std-runtime", "tokio-runtime", "lz4"] } +pulsar = { version = "5.1.0", default-features = false, features = [ + "async-std-runtime", + "tokio-runtime", + "lz4", +] } rand = "0.8.5" -reqwest = "0.11.14" +reqwest = { version = "0.11.14", features = ["native-tls-vendored"] } schema_registry_converter = { version = "3.1.0", features = ["avro"] } -serde = { version = "1.0.159", features = ["derive", "rc"] } serde_json = "1.0.95" serde_yaml = "0.9.19" sha2 = "0.10.6" @@ -101,8 +108,8 @@ similar-asserts = "1.4.2" smallvec = { version = "1.10.0", features = ["union", "serde"] } static_assertions = "1.1.0" static_init = "1.0.3" -strum = "0.24.1" -strum_macros = "0.24.3" +strum = "0.25.0" +strum_macros = "0.25.1" substring = "1.4.5" tempfile = "3.4.0" tera = "1.19.0" @@ -118,14 +125,14 @@ tokio = { version = "1.27.0", features = [ ] } tokio-stream = { version = "0.1.12", features = ["fs"] } tokio-util = { version = "0.7.7", features = ["io"] } -toml = "0.5.11" -tonic = "0.8.3" -tonic-build = { version = "0.8.4", features = ["prost"] } -tonic-health = "0.8.0" -tonic-reflection = "0.6.0" +toml = "0.7.6" +tonic = "0.9.2" +tonic-build = { version = "0.9.2", features = ["prost"] } +tonic-health = "0.9.2" +tonic-reflection = "0.9.2" tracing = "0.1.37" tracing-error = "0.2.0" -tracing-opentelemetry = "0.18.0" +tracing-opentelemetry = "0.19.0" tracing-serde = "0.1.3" tracing-subscriber = { version = "0.3.17", features = [ "env-filter", @@ -139,12 +146,13 @@ uuid = { version = "1.3.0", features = ["v4"] } # This disables compression algorithms that cause issues during linking due to # https://github.com/rust-rocksdb/rust-rocksdb/issues/514 default-features = false -version = "0.19.0" +version = "0.21.0" features = ["lz4"] [profile.release] lto = "thin" -debug = 0 # Set this to 1 or 2 to get more useful backtraces from debugger +debug = 0 # Set this to 1 or 2 to get more useful backtraces from debugger +codegen-units = 1 # Enable max optimizations for dependencies, but not for our code [profile.dev.package."*"] @@ -154,3 +162,4 @@ opt-level = 3 [profile.dev] opt-level = 1 debug = 2 +codegen-units = 1 diff --git a/Makefile b/Makefile index 48de30a14..00fabf6df 100644 --- a/Makefile +++ b/Makefile @@ -64,8 +64,8 @@ test/int/docker-up: test/int/docker-up-s3: docker compose -f ./tests/integration/docker-compose.yml -f 
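The new codegen-units = 1 settings above trade longer compile times for better-optimized output, and the comment on debug suggests raising it to 1 or 2 when backtraces are needed. A one-off way to try that without editing Cargo.toml, sketched below, is Cargo's standard environment-based profile override (CARGO_PROFILE_<NAME>_<KEY>):

# Rebuild the release profile with debug info for this invocation only;
# equivalent to temporarily setting debug = 2 under [profile.release].
CARGO_PROFILE_RELEASE_DEBUG=2 cargo build --release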
diff --git a/Makefile b/Makefile
index 48de30a14..00fabf6df 100644
--- a/Makefile
+++ b/Makefile
@@ -64,8 +64,8 @@ test/int/docker-up:
test/int/docker-up-s3:
	docker compose -f ./tests/integration/docker-compose.yml -f ./tests/integration/docker-compose.s3.yml up --build --remove-orphans --force-recreate
-test/int/docker-up-s3-only:
-	docker compose -f ./tests/integration/docker-compose.yml -f ./tests/integration/docker-compose.s3.yml up --build --remove-orphans --force-recreate minio
+test/int/docker-up-dependencies-only:
+	docker compose -f ./tests/integration/docker-compose.yml -f ./tests/integration/docker-compose.s3.yml up --build --remove-orphans --force-recreate minio pulsar
test/int/docker-up-postgres:
	docker compose -f ./tests/integration/docker-compose.yml -f ./tests/integration/docker-compose.postgres.yml up --build --remove-orphans --force-recreate
diff --git a/clients/docker-compose.yml b/clients/docker-compose.yml
index 0b26a186c..fc55fc359 100644
--- a/clients/docker-compose.yml
+++ b/clients/docker-compose.yml
@@ -36,3 +36,13 @@ services:
      poetry run poe test
    volumes:
      - ./python:/src
+
+  push-timestreams:
+    build:
+      dockerfile: Dockerfile.poetry
+    container_name: push-python
+    command: ["poetry", "publish", "--build"]
+    environment:
+      - POETRY_PYPI_TOKEN_PYPI=$POETRY_PYPI_TOKEN_PYPI
+    volumes:
+      - ./timestreams:/src
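A usage sketch for the renamed Makefile target and the new push-timestreams service (assumes Docker Compose v2 and a PyPI token exported in the environment):

# Start only the integration-test dependencies (MinIO and Pulsar).
make test/int/docker-up-dependencies-only

# Build and publish the timestreams client from clients/ via the new service.
POETRY_PYPI_TOKEN_PYPI=... docker compose run push-timestreams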
-category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "astroid-2.15.5-py3-none-any.whl", hash = "sha256:078e5212f9885fa85fbb0cf0101978a336190aadea6e13305409d099f71b2324"}, - {file = "astroid-2.15.5.tar.gz", hash = "sha256:1039262575027b441137ab4a62a793a9b43defb42c32d5670f38686207cd780f"}, + {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, + {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, ] [package.dependencies] @@ -46,14 +43,13 @@ wrapt = [ [[package]] name = "autoflake" -version = "2.1.1" +version = "2.2.0" description = "Removes unused imports and unused variables" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "autoflake-2.1.1-py3-none-any.whl", hash = "sha256:94e330a2bcf5ac01384fb2bf98bea60c6383eaa59ea62be486e376622deba985"}, - {file = "autoflake-2.1.1.tar.gz", hash = "sha256:75524b48d42d6537041d91f17573b8a98cb645642f9f05c7fcc68de10b1cade3"}, + {file = "autoflake-2.2.0-py3-none-any.whl", hash = "sha256:de409b009a34c1c2a7cc2aae84c4c05047f9773594317c6a6968bd497600d4a0"}, + {file = "autoflake-2.2.0.tar.gz", hash = "sha256:62e1f74a0fdad898a96fee6f99fe8241af90ad99c7110c884b35855778412251"}, ] [package.dependencies] @@ -64,7 +60,6 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} name = "autopep8" version = "2.0.2" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -80,7 +75,6 @@ tomli = {version = "*", markers = "python_version < \"3.11\""} name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -95,7 +89,6 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "main" optional = false python-versions = "*" files = [ @@ -107,7 +100,6 @@ files = [ name = "black" version = "22.12.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -143,7 +135,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -155,7 +146,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = "*" files = [ @@ -230,99 +220,97 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + 
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] [[package]] name = "click" -version = "8.1.3" +version = 
"8.1.6" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, + {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, ] [package.dependencies] @@ -332,7 +320,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -342,63 +329,71 @@ files = [ [[package]] name = "coverage" -version = "7.2.5" +version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, - {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, - {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, - {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, - {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, - {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, - {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, - {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, - {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, - {file = 
"coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, - {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, - {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, - {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, - {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, - {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = 
"coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = 
"coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = 
"coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.dependencies] @@ -409,31 +404,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "40.0.2" +version = "41.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, - {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, - {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, - {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, + {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, + {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, + {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, ] [package.dependencies] @@ -442,18 +440,17 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff"] -sdist = ["setuptools-rust (>=0.11.4)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -tox = ["tox"] [[package]] name = "cssselect" version = "1.1.0" description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -465,7 +462,6 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -475,32 +471,30 @@ files = [ [[package]] name = "deprecated" -version = "1.2.13" +version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, - {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, ] [package.dependencies] wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] [[package]] name = "dill" -version = "0.3.6" -description = "serialize all of python" -category = "dev" +version = "0.3.7" +description = "serialize all of Python" optional = false python-versions = ">=3.7" files = [ - {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, - {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, ] [package.extras] @@ -510,7 +504,6 @@ graph = ["objgraph (>=1.7.2)"] name = "docutils" version = "0.18.1" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -522,7 +515,6 @@ files = [ name = "domonic" version = "0.9.11" description = "Generate html with python 3. DOM API, Javascript API and more..." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -542,7 +534,6 @@ urllib3 = ">=1.26.9,<1.27.0" name = "elementpath" version = "2.5.3" description = "XPath 1.0/2.0/3.0 parsers and selectors for ElementTree and lxml" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -555,14 +546,13 @@ dev = ["Sphinx", "coverage", "flake8", "lxml", "memory-profiler", "mypy (==0.950 [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.1.2" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, ] [package.extras] @@ -570,118 +560,113 @@ test = ["pytest (>=6)"] [[package]] name = "googleapis-common-protos" -version = "1.59.0" +version = "1.59.1" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.59.0.tar.gz", hash = "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44"}, - {file = "googleapis_common_protos-1.59.0-py2.py3-none-any.whl", hash = "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f"}, + {file = "googleapis-common-protos-1.59.1.tar.gz", hash = "sha256:b35d530fe825fb4227857bc47ad84c33c809ac96f312e13182bdeaa2abe1178a"}, + {file = "googleapis_common_protos-1.59.1-py2.py3-none-any.whl", hash = "sha256:0cbedb6fb68f1c07e18eb4c48256320777707e7d0c55063ae56c15db3224a61e"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0dev)"] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.55.0" +version = "1.56.2" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.55.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:7b38e028a7bbc97a9ae5e418712452f298618b9d0493390770bf2de785251ae7"}, - {file = "grpcio-1.55.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:054b7164b25712ec71339e139875a66708a2ab09be36ac75e73b2d337ab2dc1b"}, - {file = "grpcio-1.55.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:1982c99c7091d1b7e3e78b1173097f705feef233e253a27e99746b11815ac897"}, - {file = "grpcio-1.55.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bd4f4932ef63ed32a725065aebb8585e4118a523d923db896e85c09429a36e6"}, - {file = "grpcio-1.55.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70de2b73cf22241173cb21d308786ba4ea443e4c88441a2ce445829aa638dda8"}, - {file = "grpcio-1.55.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2d25d7fcb528a40578b3d0428d401745fd5c0eeeda81f35ce2f40a10d79afd19"}, - {file = 
"grpcio-1.55.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1173a05117798aca4834d3edd504e6adc25ae9967df0f44b91a612884fb2707a"}, - {file = "grpcio-1.55.0-cp310-cp310-win32.whl", hash = "sha256:7c00263d792a244bef67a8d3b357ccbcdae6341c5961dbee494d8f967f9aee69"}, - {file = "grpcio-1.55.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab784204d9923368e0e5877d7795584b9606a51b128ee199ad8b5888d0c66592"}, - {file = "grpcio-1.55.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c97cfae0b7a17dc1a0a3e4333f4f46daa114d85f950a67f39cc141b5425182e4"}, - {file = "grpcio-1.55.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:8a910fa9b95a286f4bc1879dcf8d5ccb95b5e33bb63323fc4414d157f23afef1"}, - {file = "grpcio-1.55.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:3ab9bf80c19c91847f45ff32af94c85d282545a62db39d797838244d57831d78"}, - {file = "grpcio-1.55.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4370d2cca37301bcc69453d3dd3c1576d06d6b3e337bfec55b3aab2fe106b25c"}, - {file = "grpcio-1.55.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dad999423b33ad5409e986587593b6062a8260b74ae8fc8162ce231c6b7a929e"}, - {file = "grpcio-1.55.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d396ec4d520b58f43142958cff071e5ad1c50ac87d29d086a9c6a990a09ea536"}, - {file = "grpcio-1.55.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2a3b837d5837b9069783026b57aa0ff12e34d3218fdeda3f9c06d3950266d8e"}, - {file = "grpcio-1.55.0-cp311-cp311-win32.whl", hash = "sha256:ee0de9cb6813704969e53743e0969fd95225ff24bd686c89ed12a18147f6566c"}, - {file = "grpcio-1.55.0-cp311-cp311-win_amd64.whl", hash = "sha256:9a11b1dd4b1572e85fba5911309c15980a1ff77c555fad0ecdbe3711ef741908"}, - {file = "grpcio-1.55.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:d0209fb3cb55c5288a1dec72dcaae2c1b501edceca10d22c0f0baa5e60e2b22c"}, - {file = "grpcio-1.55.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:322d4ebc37cbc8d8596b1da6055e3e81e8cfd36816ab4b285c1163c3042e6067"}, - {file = "grpcio-1.55.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:60efab181c32e029e0960f238508396dd001ba2064168f8148e6356db093967c"}, - {file = "grpcio-1.55.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f6088d898e1e987d761d58dc4cd724e7457a7a86d11561fa95c3b826d025dc"}, - {file = "grpcio-1.55.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ab0e879b1585be41cfbb02faed67913700ced8015da4763f1f0bdd7dfb4ab7"}, - {file = "grpcio-1.55.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:157f5615c7b5d0968727472f6394dee01555ef4246d2f2cfb6555be857936d74"}, - {file = "grpcio-1.55.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:67c4fda71f92225c5e74fa15bffa6be022c07111f674fe1f234c1ef4c1bb7927"}, - {file = "grpcio-1.55.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a202dcf0c512292fd7a2154e4044c70400212eaa726685ebf8af105e25693c5a"}, - {file = "grpcio-1.55.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:ce82d06cdfb8a9292fb857f00bee11a2430e4ac2742e07b46c1a3072d683256a"}, - {file = "grpcio-1.55.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:51b7a27a129f743d68394f94029f88ef3da090fc13776b9dfa3c79c5f4b30525"}, - {file = "grpcio-1.55.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:7c32f87bec58a8a0d4f4d5387bd61a383bd32b2caffb2de3cd579e47490b7e19"}, - {file = "grpcio-1.55.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89107071b5f14af6bbb855183d338a0fa94136bbeb3989c9773c6184e51a95e9"}, - {file = 
"grpcio-1.55.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1041cad23f00943d8889ad15427d87bbdacbbe2df5cec951c314f2f3967d4691"}, - {file = "grpcio-1.55.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:56631cc0bdf86d15ea1599b9697ace65e6b52c6b136d3666bf7769d3d6d087a8"}, - {file = "grpcio-1.55.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:10af4774da9c0665a1bf519333694ac40d72d83cb514534b99db0a5e3d5c3593"}, - {file = "grpcio-1.55.0-cp38-cp38-win32.whl", hash = "sha256:7b8665da31b5bd701b338a581de7b9631d50b4b7ee67125c2d1dc2228cc119d8"}, - {file = "grpcio-1.55.0-cp38-cp38-win_amd64.whl", hash = "sha256:74780f570c76feb8e62a8c019b495fea435b60218682fce513ff2c71262c346c"}, - {file = "grpcio-1.55.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:6b8dbb151b116825c10f01e5b7b75e14edd0e60736a65311d0d98a4cd0489303"}, - {file = "grpcio-1.55.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:a82283d6e0403d3e2e7eebb99cb0d2783e20b6791c8c94bd8d4a4233b58b1ea0"}, - {file = "grpcio-1.55.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:ba32a8e9bc3eecc6bab6824b905f04c3fdc31659c3e6e06841b774e7cb4410af"}, - {file = "grpcio-1.55.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1e2b705d524e780998218cf429d30b6ffc54cb6e54812c9597bc5df12dbcb5b"}, - {file = "grpcio-1.55.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe78365c64b2c7470d31c4941e10c6654042bcbb53897b9b1e2c96d6d0da9ef9"}, - {file = "grpcio-1.55.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b440ccc434c1ad5874465bfae40c0a27f562ae5f7c5b468b6689bc55e8bf1c1"}, - {file = "grpcio-1.55.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0d3d5c644d523dee82ffcc44ad50cd66e3bf66e7fa60ad3cdb1eb868228e4ab0"}, - {file = "grpcio-1.55.0-cp39-cp39-win32.whl", hash = "sha256:c33dbeecc14f1a413e8af8ae1208cb383b063fa2ff2e1f309b4d3d7739b0927e"}, - {file = "grpcio-1.55.0-cp39-cp39-win_amd64.whl", hash = "sha256:2663741acc117370fd53336267cfb24c965e9d3ea1e4933a3e4411712d3091fb"}, - {file = "grpcio-1.55.0.tar.gz", hash = "sha256:dd15027a171ff93c97f9c704fa120bc5d0691dc7e71ae450e2ecade1a2799b53"}, + {file = "grpcio-1.56.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:bf0b9959e673505ee5869950642428046edb91f99942607c2ecf635f8a4b31c9"}, + {file = "grpcio-1.56.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:5144feb20fe76e73e60c7d73ec3bf54f320247d1ebe737d10672480371878b48"}, + {file = "grpcio-1.56.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a72797549935c9e0b9bc1def1768c8b5a709538fa6ab0678e671aec47ebfd55e"}, + {file = "grpcio-1.56.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3f3237a57e42f79f1e560726576aedb3a7ef931f4e3accb84ebf6acc485d316"}, + {file = "grpcio-1.56.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:900bc0096c2ca2d53f2e5cebf98293a7c32f532c4aeb926345e9747452233950"}, + {file = "grpcio-1.56.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:97e0efaebbfd222bcaac2f1735c010c1d3b167112d9d237daebbeedaaccf3d1d"}, + {file = "grpcio-1.56.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c0c85c5cbe8b30a32fa6d802588d55ffabf720e985abe9590c7c886919d875d4"}, + {file = "grpcio-1.56.2-cp310-cp310-win32.whl", hash = "sha256:06e84ad9ae7668a109e970c7411e7992751a116494cba7c4fb877656527f9a57"}, + {file = "grpcio-1.56.2-cp310-cp310-win_amd64.whl", hash = "sha256:10954662f77dc36c9a1fb5cc4a537f746580d6b5734803be1e587252682cda8d"}, + {file = "grpcio-1.56.2-cp311-cp311-linux_armv7l.whl", hash = 
"sha256:c435f5ce1705de48e08fcbcfaf8aee660d199c90536e3e06f2016af7d6a938dd"}, + {file = "grpcio-1.56.2-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:6108e5933eb8c22cd3646e72d5b54772c29f57482fd4c41a0640aab99eb5071d"}, + {file = "grpcio-1.56.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8391cea5ce72f4a12368afd17799474015d5d3dc00c936a907eb7c7eaaea98a5"}, + {file = "grpcio-1.56.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:750de923b456ca8c0f1354d6befca45d1f3b3a789e76efc16741bd4132752d95"}, + {file = "grpcio-1.56.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fda2783c12f553cdca11c08e5af6eecbd717280dc8fbe28a110897af1c15a88c"}, + {file = "grpcio-1.56.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9e04d4e4cfafa7c5264e535b5d28e786f0571bea609c3f0aaab13e891e933e9c"}, + {file = "grpcio-1.56.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89a49cc5ad08a38b6141af17e00d1dd482dc927c7605bc77af457b5a0fca807c"}, + {file = "grpcio-1.56.2-cp311-cp311-win32.whl", hash = "sha256:6a007a541dff984264981fbafeb052bfe361db63578948d857907df9488d8774"}, + {file = "grpcio-1.56.2-cp311-cp311-win_amd64.whl", hash = "sha256:af4063ef2b11b96d949dccbc5a987272f38d55c23c4c01841ea65a517906397f"}, + {file = "grpcio-1.56.2-cp37-cp37m-linux_armv7l.whl", hash = "sha256:a6ff459dac39541e6a2763a4439c4ca6bc9ecb4acc05a99b79246751f9894756"}, + {file = "grpcio-1.56.2-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:f20fd21f7538f8107451156dd1fe203300b79a9ddceba1ee0ac8132521a008ed"}, + {file = "grpcio-1.56.2-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:d1fbad1f9077372b6587ec589c1fc120b417b6c8ad72d3e3cc86bbbd0a3cee93"}, + {file = "grpcio-1.56.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee26e9dfb3996aff7c870f09dc7ad44a5f6732b8bdb5a5f9905737ac6fd4ef1"}, + {file = "grpcio-1.56.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c60abd950d6de3e4f1ddbc318075654d275c29c846ab6a043d6ed2c52e4c8c"}, + {file = "grpcio-1.56.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1c31e52a04e62c8577a7bf772b3e7bed4df9c9e0dd90f92b6ffa07c16cab63c9"}, + {file = "grpcio-1.56.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:345356b307cce5d14355e8e055b4ca5f99bc857c33a3dc1ddbc544fca9cd0475"}, + {file = "grpcio-1.56.2-cp37-cp37m-win_amd64.whl", hash = "sha256:42e63904ee37ae46aa23de50dac8b145b3596f43598fa33fe1098ab2cbda6ff5"}, + {file = "grpcio-1.56.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:7c5ede2e2558f088c49a1ddda19080e4c23fb5d171de80a726b61b567e3766ed"}, + {file = "grpcio-1.56.2-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:33971197c47965cc1d97d78d842163c283e998223b151bab0499b951fd2c0b12"}, + {file = "grpcio-1.56.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d39f5d4af48c138cb146763eda14eb7d8b3ccbbec9fe86fb724cd16e0e914c64"}, + {file = "grpcio-1.56.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ded637176addc1d3eef35331c39acc598bac550d213f0a1bedabfceaa2244c87"}, + {file = "grpcio-1.56.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c90da4b124647547a68cf2f197174ada30c7bb9523cb976665dfd26a9963d328"}, + {file = "grpcio-1.56.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3ccb621749a81dc7755243665a70ce45536ec413ef5818e013fe8dfbf5aa497b"}, + {file = "grpcio-1.56.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4eb37dd8dd1aa40d601212afa27ca5be255ba792e2e0b24d67b8af5e012cdb7d"}, + {file = 
"grpcio-1.56.2-cp38-cp38-win32.whl", hash = "sha256:ddb4a6061933bd9332b74eac0da25f17f32afa7145a33a0f9711ad74f924b1b8"}, + {file = "grpcio-1.56.2-cp38-cp38-win_amd64.whl", hash = "sha256:8940d6de7068af018dfa9a959a3510e9b7b543f4c405e88463a1cbaa3b2b379a"}, + {file = "grpcio-1.56.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:51173e8fa6d9a2d85c14426bdee5f5c4a0654fd5fddcc21fe9d09ab0f6eb8b35"}, + {file = "grpcio-1.56.2-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:373b48f210f43327a41e397391715cd11cfce9ded2fe76a5068f9bacf91cc226"}, + {file = "grpcio-1.56.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:42a3bbb2bc07aef72a7d97e71aabecaf3e4eb616d39e5211e2cfe3689de860ca"}, + {file = "grpcio-1.56.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5344be476ac37eb9c9ad09c22f4ea193c1316bf074f1daf85bddb1b31fda5116"}, + {file = "grpcio-1.56.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3fa3ab0fb200a2c66493828ed06ccd1a94b12eddbfb985e7fd3e5723ff156c6"}, + {file = "grpcio-1.56.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b975b85d1d5efc36cf8b237c5f3849b64d1ba33d6282f5e991f28751317504a1"}, + {file = "grpcio-1.56.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cbdf2c498e077282cd427cfd88bdce4668019791deef0be8155385ab2ba7837f"}, + {file = "grpcio-1.56.2-cp39-cp39-win32.whl", hash = "sha256:139f66656a762572ae718fa0d1f2dce47c05e9fbf7a16acd704c354405b97df9"}, + {file = "grpcio-1.56.2-cp39-cp39-win_amd64.whl", hash = "sha256:830215173ad45d670140ff99aac3b461f9be9a6b11bee1a17265aaaa746a641a"}, + {file = "grpcio-1.56.2.tar.gz", hash = "sha256:0ff789ae7d8ddd76d2ac02e7d13bfef6fc4928ac01e1dcaa182be51b6bcc0aaa"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.55.0)"] +protobuf = ["grpcio-tools (>=1.56.2)"] [[package]] name = "grpcio-health-checking" -version = "1.55.0" +version = "1.56.2" description = "Standard Health Checking Service for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-health-checking-1.55.0.tar.gz", hash = "sha256:66bf5e320034392cbd14a763980293e70695b0c03d1c8c3aa854fe6aa43981f9"}, - {file = "grpcio_health_checking-1.55.0-py3-none-any.whl", hash = "sha256:1f9e6f6019c1ae7c46d4cf847d058a65d3e7c82b4774a31f3c34a9641aae47d4"}, + {file = "grpcio-health-checking-1.56.2.tar.gz", hash = "sha256:5cda1d8a1368be2cda04f9284a8b73cee09ff3e277eec8ddd9abcf2fef76b372"}, + {file = "grpcio_health_checking-1.56.2-py3-none-any.whl", hash = "sha256:d0aedbcdbb365c08a5bd860384098502e35045e31fdd9d80e440bb58487e83d7"}, ] [package.dependencies] -grpcio = ">=1.55.0" +grpcio = ">=1.56.2" protobuf = ">=4.21.6" [[package]] name = "grpcio-status" -version = "1.55.0" +version = "1.56.2" description = "Status proto mapping for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "grpcio-status-1.55.0.tar.gz", hash = "sha256:beeca8d5d3783e155676beaade0dae9eaea12cd9701498905dca0d35bd6b36f8"}, - {file = "grpcio_status-1.55.0-py3-none-any.whl", hash = "sha256:6da36bab11bb252b6854b86578f484c4fed9f8169816b490b6d3a32ec2a971fe"}, + {file = "grpcio-status-1.56.2.tar.gz", hash = "sha256:a046b2c0118df4a5687f4585cca9d3c3bae5c498c4dff055dcb43fb06a1180c8"}, + {file = "grpcio_status-1.56.2-py3-none-any.whl", hash = "sha256:63f3842867735f59f5d70e723abffd2e8501a6bcd915612a1119e52f10614782"}, ] [package.dependencies] googleapis-common-protos = ">=1.5.5" -grpcio = ">=1.55.0" +grpcio = ">=1.56.2" protobuf = ">=4.21.6" [[package]] name = "html5lib" version = "1.1" description = "HTML parser based 
on the WHATWG HTML specification" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -703,7 +688,6 @@ lxml = ["lxml"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -715,7 +699,6 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -725,14 +708,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "6.8.0" description = "Read metadata from Python packages" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, + {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, ] [package.dependencies] @@ -741,13 +723,12 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -759,7 +740,6 @@ files = [ name = "ipython" version = "7.34.0" description = "IPython: Productive Interactive Computing" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -796,7 +776,6 @@ test = ["ipykernel", "nbformat", "nose (>=0.10.1)", "numpy (>=1.17)", "pygments" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -814,7 +793,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "jedi" version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -834,7 +812,6 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -852,7 +829,6 @@ i18n = ["Babel (>=2.7)"] name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -896,69 +872,67 @@ files = [ [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = 
"MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -973,7 +947,6 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -985,7 +958,6 @@ files = [ name = "mypy" version = "0.991" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1036,7 +1008,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1046,47 +1017,45 @@ files = [ [[package]] name = "numpy" -version = "1.24.3" +version = "1.24.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, - {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, - {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, - {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, - {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, - {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"}, - {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"}, - {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, - {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, - {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, - {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = 
"sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] [[package]] name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1098,7 +1067,6 @@ files = [ name = "pandas" version = "1.3.5" description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = false python-versions = ">=3.7.1" files = [ @@ -1131,7 +1099,7 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.17.3", markers = "platform_machine != \"aarch64\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, + {version = ">=1.17.3", markers = "(platform_machine != \"aarch64\" and platform_machine != \"arm64\") and python_version < \"3.10\""}, {version = ">=1.19.2", markers = "platform_machine == \"aarch64\" and python_version < \"3.10\""}, {version = ">=1.20.0", markers = "platform_machine == \"arm64\" and python_version < \"3.10\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, @@ -1146,7 +1114,6 @@ test = ["hypothesis (>=3.58)", "pytest (>=6.0)", "pytest-xdist"] name = "parso" version = "0.8.3" description = "A Python Parser" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1162,7 +1129,6 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1174,7 +1140,6 @@ files = [ name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1186,7 +1151,6 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." 
-category = "main" optional = false python-versions = "*" files = [ @@ -1201,7 +1165,6 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "main" optional = false python-versions = "*" files = [ @@ -1211,30 +1174,28 @@ files = [ [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.9.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.9.1-py3-none-any.whl", hash = "sha256:ad8291ae0ae5072f66c16945166cb11c63394c7a3ad1b1bc9828ca3162da8c2f"}, + {file = "platformdirs-3.9.1.tar.gz", hash = "sha256:1b42b450ad933e981d56e59f1b97495428c9bd60698baab9f3eb3d00d5822421"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.extras] @@ -1245,7 +1206,6 @@ testing = ["pytest", "pytest-benchmark"] name = "poethepoet" version = "0.18.1" description = "A task runner that works well with poetry." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1262,14 +1222,13 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "prompt-toolkit" -version = "3.0.38" +version = "3.0.39" description = "Library for building powerful interactive command lines in Python" -category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, - {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, ] [package.dependencies] @@ -1277,32 +1236,30 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "4.23.1" +version = "4.23.4" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.23.1-cp310-abi3-win32.whl", hash = "sha256:410bcc0a5b279f634d3e16082ce221dfef7c3392fac723500e2e64d1806dd2be"}, - {file = "protobuf-4.23.1-cp310-abi3-win_amd64.whl", hash = "sha256:32e78beda26d7a101fecf15d7a4a792278a0d26a31bc327ff05564a9d68ab8ee"}, - {file = "protobuf-4.23.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f9510cac91e764e86acd74e2b7f7bc5e6127a7f3fb646d7c8033cfb84fd1176a"}, - {file = "protobuf-4.23.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:346990f634272caac1f09efbcfbbacb23098b1f606d172534c6fa2d9758bb436"}, - {file = "protobuf-4.23.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3ce113b3f3362493bddc9069c2163a38f240a9ed685ff83e7bcb756b05e1deb0"}, - {file = "protobuf-4.23.1-cp37-cp37m-win32.whl", hash = "sha256:2036a3a1e7fc27f973fa0a7888dce712393af644f4695385f117886abc792e39"}, - {file = "protobuf-4.23.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3b8905eafe4439076e1f58e9d1fa327025fd2777cf90f14083092ae47f77b0aa"}, - {file = "protobuf-4.23.1-cp38-cp38-win32.whl", hash = "sha256:5b9cd6097e6acae48a68cb29b56bc79339be84eca65b486910bb1e7a30e2b7c1"}, - {file = "protobuf-4.23.1-cp38-cp38-win_amd64.whl", hash = "sha256:decf119d54e820f298ee6d89c72d6b289ea240c32c521f00433f9dc420595f38"}, - {file = "protobuf-4.23.1-cp39-cp39-win32.whl", hash = "sha256:91fac0753c3c4951fbb98a93271c43cc7cf3b93cf67747b3e600bb1e5cc14d61"}, - {file = "protobuf-4.23.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac50be82491369a9ec3710565777e4da87c6d2e20404e0abb1f3a8f10ffd20f0"}, - {file = "protobuf-4.23.1-py3-none-any.whl", hash = "sha256:65f0ac96ef67d7dd09b19a46aad81a851b6f85f89725577f16de38f2d68ad477"}, - {file = "protobuf-4.23.1.tar.gz", hash = "sha256:95789b569418a3e32a53f43d7763be3d490a831e9c08042539462b6d972c2d7e"}, + {file = "protobuf-4.23.4-cp310-abi3-win32.whl", hash = "sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b"}, + {file = "protobuf-4.23.4-cp310-abi3-win_amd64.whl", hash = "sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12"}, + {file = "protobuf-4.23.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd"}, + {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a"}, + {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597"}, + {file = "protobuf-4.23.4-cp37-cp37m-win32.whl", hash = "sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e"}, + {file = "protobuf-4.23.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0"}, + {file = "protobuf-4.23.4-cp38-cp38-win32.whl", hash = "sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70"}, + {file = "protobuf-4.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2"}, + {file = "protobuf-4.23.4-cp39-cp39-win32.whl", hash = "sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720"}, + {file = "protobuf-4.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474"}, + {file = "protobuf-4.23.4-py3-none-any.whl", hash = "sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff"}, + {file = "protobuf-4.23.4.tar.gz", hash = "sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9"}, ] [[package]] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -1314,7 +1271,6 @@ files = [ name = "pyarrow" version = "10.0.1" description = "Python library for Apache Arrow" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1352,7 +1308,6 @@ numpy = ">=1.16.6" name = "pycodestyle" version = "2.10.0" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1364,7 +1319,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1376,7 +1330,6 @@ files = [ name = "pyflakes" version = "3.0.1" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1386,14 +1339,13 @@ files = [ [[package]] name = "pygithub" -version = "1.58.2" +version = "1.59.0" description = "Use the full Github API v3" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "PyGithub-1.58.2-py3-none-any.whl", hash = "sha256:f435884af617c6debaa76cbc355372d1027445a56fbc39972a3b9ed4968badc8"}, - {file = "PyGithub-1.58.2.tar.gz", hash = "sha256:1e6b1b7afe31f75151fb81f7ab6b984a7188a852bdb123dbb9ae90023c3ce60f"}, + {file = "PyGithub-1.59.0-py3-none-any.whl", hash = "sha256:126bdbae72087d8d038b113aab6b059b4553cb59348e3024bb1a1cae406ace9e"}, + {file = "PyGithub-1.59.0.tar.gz", hash = "sha256:6e05ff49bac3caa7d1d6177a10c6e55a3e20c85b92424cc198571fd0cf786690"}, ] [package.dependencies] @@ -1406,7 +1358,6 @@ requests = ">=2.14.0" name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1419,14 +1370,13 @@ plugins = ["importlib-metadata"] [[package]] name = "pyjwt" -version = "2.7.0" +version = "2.8.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "PyJWT-2.7.0-py3-none-any.whl", hash = "sha256:ba2b425b15ad5ef12f200dc67dd56af4e26de2331f965c5439994dad075876e1"}, - {file = "PyJWT-2.7.0.tar.gz", hash = "sha256:bd6ca4a3c4285c1a2d4349e5a035fdf8fb94e04ccd0fcbe6ba289dae9cc3e074"}, + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, ] [package.dependencies] @@ -1442,7 +1392,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.4" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -1472,7 +1421,6 @@ testutils = ["gitpython (>3)"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1497,14 +1445,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pytest" -version = "7.3.1" +version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] [package.dependencies] @@ -1516,18 +1463,17 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -1541,7 +1487,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1556,7 +1501,6 @@ six = ">=1.5" name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -1566,59 +1510,57 @@ files = [ [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = 
"PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] [[package]] name = "requests" version = "2.28.2" description = "Python HTTP for Humans." 
-category = "main" optional = false python-versions = ">=3.7, <4" files = [ @@ -1636,16 +1578,26 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "semver" +version = "3.0.1" +description = "Python helper for Semantic Versioning (https://semver.org)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "semver-3.0.1-py3-none-any.whl", hash = "sha256:2a23844ba1647362c7490fe3995a86e097bb590d16f0f32dfc383008f19e4cdf"}, + {file = "semver-3.0.1.tar.gz", hash = "sha256:9ec78c5447883c67b97f98c3b6212796708191d22e4ad30f4570f840171cbce1"}, +] + [[package]] name = "setuptools" -version = "67.8.0" +version = "68.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, ] [package.extras] @@ -1657,7 +1609,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1669,7 +1620,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -1681,7 +1631,6 @@ files = [ name = "sphinx" version = "5.3.0" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1717,7 +1666,6 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] name = "sphinx-autoapi" version = "2.1.0" description = "Sphinx API documentation generator" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1741,7 +1689,6 @@ go = ["sphinxcontrib-golangdomain"] name = "sphinx-autodoc-typehints" version = "1.22" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1761,7 +1708,6 @@ type-comment = ["typed-ast (>=1.5.4)"] name = "sphinx-rtd-theme" version = "1.2.0" description = "Read the Docs theme for Sphinx" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1781,7 +1727,6 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1797,7 +1742,6 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1813,7 +1757,6 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1829,7 +1772,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jquery" version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" -category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -1844,7 +1786,6 @@ Sphinx = ">=1.8" name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1859,7 +1800,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1875,7 +1815,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1891,7 +1830,6 @@ test = ["pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1903,7 +1841,6 @@ files = [ name = "tomlkit" version = "0.11.8" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1915,7 +1852,6 @@ files = [ name = "tqdm" version = "4.65.0" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1936,7 +1872,6 @@ telegram = ["requests"] name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1950,21 +1885,19 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] [[package]] name = "types-decorator" -version = "5.1.8.3" +version = "5.1.8.4" description = "Typing stubs for decorator" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-decorator-5.1.8.3.tar.gz", hash = "sha256:32dd380fc88d0e7a1f27a84ba1ce6e29ba0ad42caaa1b88e7b5d27e61f6e4962"}, - {file = "types_decorator-5.1.8.3-py3-none-any.whl", hash = "sha256:2ad329af49b824db8069ebba9bf03b1cbcafba72eb338be255a1fd902e85edb9"}, + {file = "types-decorator-5.1.8.4.tar.gz", hash = "sha256:a8c39024634e99834bef146cec2e36c585f47884addf4dc65d6d0b7b1f627517"}, + {file = "types_decorator-5.1.8.4-py3-none-any.whl", hash = "sha256:e411203e0ec0116964dcd491e162a951e4a68cd2d4a946172690bee43c4ebe6e"}, ] [[package]] name = "types-docutils" version = "0.20.0.1" description = "Typing stubs for docutils" -category = "dev" optional = false python-versions = "*" files = [ @@ -1974,26 +1907,24 @@ files = [ [[package]] name = "types-protobuf" -version = "4.23.0.1" +version = "4.23.0.2" description = "Typing stubs for protobuf" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-protobuf-4.23.0.1.tar.gz", hash = "sha256:7bd5ea122a057b11a82b785d9de464932a1e9175fe977a4128adef11d7f35547"}, - {file = "types_protobuf-4.23.0.1-py3-none-any.whl", hash = 
"sha256:c926104f69ea62103846681b35b690d8d100ecf86c6cdda16c850a1313a272e4"}, + {file = "types-protobuf-4.23.0.2.tar.gz", hash = "sha256:1066b069d4f0e09bdebb64ca4f35cc6b8accf52f808368046ccec96744af0375"}, + {file = "types_protobuf-4.23.0.2-py3-none-any.whl", hash = "sha256:ee1a1ac8c099870075ef4457c01f3d56e189d8af1c13c3aa9f8dc207ca35337c"}, ] [[package]] name = "types-requests" -version = "2.30.0.0" +version = "2.31.0.2" description = "Typing stubs for requests" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-requests-2.30.0.0.tar.gz", hash = "sha256:dec781054324a70ba64430ae9e62e7e9c8e4618c185a5cb3f87a6738251b5a31"}, - {file = "types_requests-2.30.0.0-py3-none-any.whl", hash = "sha256:c6cf08e120ca9f0dc4fa4e32c3f953c3fba222bcc1db6b97695bce8da1ba9864"}, + {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"}, + {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"}, ] [package.dependencies] @@ -2003,7 +1934,6 @@ types-urllib3 = "*" name = "types-setuptools" version = "65.7.0.4" description = "Typing stubs for setuptools" -category = "dev" optional = false python-versions = "*" files = [ @@ -2016,57 +1946,52 @@ types-docutils = "*" [[package]] name = "types-six" -version = "1.16.21.8" +version = "1.16.21.9" description = "Typing stubs for six" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-six-1.16.21.8.tar.gz", hash = "sha256:02a892ff8f423c4c5d15de7c6f4d433e643c863bcbefabd19251b478cbb284ab"}, - {file = "types_six-1.16.21.8-py3-none-any.whl", hash = "sha256:e118ebebb4944af96b4c022b15c0769c065af09126eb148b7797023e905e0652"}, + {file = "types-six-1.16.21.9.tar.gz", hash = "sha256:746e6c25b8c48b3c8ab9efe7f68022839111de423d35ba4b206b88b12d75f233"}, + {file = "types_six-1.16.21.9-py3-none-any.whl", hash = "sha256:1591a09430a3035326da5fdb71692d0b3cc36b25a440cc5929ca6241f3984705"}, ] [[package]] name = "types-typed-ast" -version = "1.5.8.6" +version = "1.5.8.7" description = "Typing stubs for typed-ast" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-typed-ast-1.5.8.6.tar.gz", hash = "sha256:9543b5863db97b412a2b1d5f407c908336365a0bad304d64e8328a769f48c230"}, - {file = "types_typed_ast-1.5.8.6-py3-none-any.whl", hash = "sha256:01edb7d635565fd27657a7e88c3cf204282e19a1fda4278b04eb4c7744a21de9"}, + {file = "types-typed-ast-1.5.8.7.tar.gz", hash = "sha256:f7795f6f9d597b35212314040b993f6613b51d81738edce3c1e3a3e9ef655124"}, + {file = "types_typed_ast-1.5.8.7-py3-none-any.whl", hash = "sha256:97bdd9b4228f96c6904a76e10a050305ddadb529bd35e4d8234711e09c41b543"}, ] [[package]] name = "types-urllib3" -version = "1.26.25.13" +version = "1.26.25.14" description = "Typing stubs for urllib3" -category = "dev" optional = false python-versions = "*" files = [ - {file = "types-urllib3-1.26.25.13.tar.gz", hash = "sha256:3300538c9dc11dad32eae4827ac313f5d986b8b21494801f1bf97a1ac6c03ae5"}, - {file = "types_urllib3-1.26.25.13-py3-none-any.whl", hash = "sha256:5dbd1d2bef14efee43f5318b5d36d805a489f6600252bb53626d4bfafd95e27c"}, + {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, + {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, ] [[package]] name = "typing-extensions" -version = "4.6.0" +version = "4.7.1" 
description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.6.0-py3-none-any.whl", hash = "sha256:6ad00b63f849b7dcc313b70b6b304ed67b2b2963b3098a33efe18056b1a9a223"}, - {file = "typing_extensions-4.6.0.tar.gz", hash = "sha256:ff6b238610c747e44c268aa4bb23c8c735d665a63726df3f9431ce707f2aa768"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] name = "unidecode" version = "1.3.6" description = "ASCII transliterations of Unicode text" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2076,14 +2001,13 @@ files = [ [[package]] name = "urllib3" -version = "1.26.15" +version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, + {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, + {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, ] [package.extras] @@ -2095,7 +2019,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -2107,7 +2030,6 @@ files = [ name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "main" optional = false python-versions = "*" files = [ @@ -2119,7 +2041,6 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
-category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -2202,21 +2123,20 @@ files = [ [[package]] name = "zipp" -version = "3.15.0" +version = "3.16.2" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, + {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" python-versions = "^3.8.2" -content-hash = "0c0aa4a0c159abfc4990871e7b105354939ee8d4773a24e7cb48a2254b402513" +content-hash = "1ea55e2b21e2fe3dba8f23ffe3ef9637caefacff4cb5ebb60db2374f58a52d23" diff --git a/clients/python/pyproject.toml b/clients/python/pyproject.toml index 3e9dd0678..ec5ed9ad4 100644 --- a/clients/python/pyproject.toml +++ b/clients/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "kaskada" -version = "0.5.1" +version = "0.5.2" description = "A client library for the Kaskada time travel machine learning service" authors = ["Kaskada "] license = "Apache-2.0" @@ -27,6 +27,7 @@ pygithub = "^1.57" pandas = "~1.3" ipython = "7.34.0" grpcio-health-checking = "^1.54.2" +semver = "^3.0.1" [tool.poetry.group.dev.dependencies] diff --git a/clients/python/src/kaskada/api/release.py b/clients/python/src/kaskada/api/release.py index 7687463ee..ce31c34e7 100644 --- a/clients/python/src/kaskada/api/release.py +++ b/clients/python/src/kaskada/api/release.py @@ -4,6 +4,7 @@ import shutil import sys from pathlib import Path +from typing import Optional import requests from github import Github @@ -41,15 +42,20 @@ def __init__(self) -> None: access_token = os.getenv(self.GITHUB_ACCESS_TOKEN_ENV) self._github = Github(access_token) - def download_latest_release( - self, download_path: Path, manager_bin_name: str, engine_bin_name: str + def download_release( + self, + download_path: Path, + manager_bin_name: str, + engine_bin_name: str, + engine_version: Optional[str] = None, ) -> LocalRelease: - """Downloads the latest version of the kaskada-manager and kaskada-engine services. + """Downloads the appropriate version of the kaskada-manager and kaskada-engine services. 
Args: download_path (Path): The local download path manager_bin_name (str): The name of the manager binary to save in download path engine_bin_name (str): The name of the engine binary to save in download path + engine_version (Optional[str]): The engine version to download, e.g., `engine@v<semver>`. Defaults to None for latest release. Raises: RuntimeError: unable to get release assets @@ -68,12 +74,18 @@ def download_latest_release( session.headers["Authorization"] = f"token {access_token}" repo = self._github.get_repo(f"{self.ORGANIZATION}/{self.REPO_NAME}") - latest_release = repo.get_latest_release() - logger.info(f"Using latest release version: {latest_release.tag_name}") - download_path = download_path / latest_release.tag_name + + if engine_version is None: + downloaded_release = repo.get_latest_release() + logger.info(f"Using latest release version: {downloaded_release.tag_name}") + else: + downloaded_release = repo.get_release(engine_version) + logger.info(f"Using release version: {downloaded_release.tag_name}") + + download_path = download_path / downloaded_release.tag_name download_path.mkdir(parents=True, exist_ok=True) logger.debug(f"Download path: {download_path}") - assets = latest_release.get_assets() + assets = downloaded_release.get_assets() manager_path, engine_path = (None, None) for asset in assets: @@ -113,7 +125,7 @@ def __download( Args: r (requests.Session): The request session - url (str): The targget URL + url (str): The target URL download_path (Path): The local path to stream write the file description (str): The description to render during download file_size (int, optional): The file size if known ahead of time. Defaults to response content size or 0. diff --git a/clients/python/src/kaskada/api/session.py b/clients/python/src/kaskada/api/session.py index 47d998da8..e24b7e4d3 100644 --- a/clients/python/src/kaskada/api/session.py +++ b/clients/python/src/kaskada/api/session.py @@ -1,10 +1,13 @@ import logging import os +import re from abc import ABC from datetime import datetime from pathlib import Path from typing import Any, Dict, List, Optional, Tuple +import semver + import kaskada.client from kaskada.api import release from kaskada.api.local_session.local_service import KaskadaLocalService @@ -146,11 +149,14 @@ class LocalBuilder(Builder): manager_configs: Dict[str, Any] engine_configs: Dict[str, Any] + # The engine version to download, e.g., `engine@v1.2.3-beta.1`. Defaults to None to get latest release. _engine_version: Optional[str] def __init__( self, endpoint: str = kaskada.client.KASKADA_DEFAULT_ENDPOINT, is_secure: bool = kaskada.client.KASKADA_IS_SECURE, + engine_version: Optional[str] = None, ) -> None: super().__init__() self.manager_configs = {"-no-color": "1"} @@ -162,6 +168,7 @@ def __init__( self.in_memory(False) self.engine_configs: Dict[str, Any] = {"--log-no-color": "1"} self.keep_alive(True) + self._engine_version = engine_version def path(self, path: str): self._path = path @@ -221,6 +228,32 @@ def manager_grpc_port(self, port: int): self.manager_configs["-grpc-port"] = port return self + def engine_version(self, version: str): + """Set a specific version of the engine to download. The version must be in the format `engine@v<semver>`. + + Args: + version (str): The git tag for a release on the kaskada repo + + Raises: + ValueError: When an invalid version is provided. Note this does not check the version's existence on GitHub. + + Returns: + LocalBuilder: Updated instance of LocalBuilder with the engine version set.
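+
+        Example:
+            Pinning the engine to a release tag (the tag below is illustrative;
+            any existing `engine@v<semver>` release tag on the kaskada repo works):
+
+            >>> builder = LocalBuilder().engine_version("engine@v0.0.1-beta.1")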
+ """ + error_message = "invalid version provided. See https://github.com/kaskada-ai/kaskada/tags for valid values." + match = re.search(r"^engine@v(.*)$", version) + if match: + sem_ver = match.group(1) + try: + semver.Version.parse(sem_ver) + except: + raise ValueError(error_message) + else: + raise ValueError(error_message) + + self._engine_version = version + return self + def __get_log_path(self, file_name: str) -> Path: if self._path is None: raise ValueError("no path provided and KASKADA_PATH was not set") @@ -274,25 +307,30 @@ def __get_local_services(self) -> Tuple[KaskadaLocalService, KaskadaLocalService ) return (manager_service, engine_service) - def __download_latest_release(self): - """Downloads the latest release version to the binary path.""" + def __download_release(self, engine_version: Optional[str] = None): + """Downloads a kaskada release version to the binary path. + + Args: + engine_version (Optional[str]): The engine version to download, e.g., engine@v. Defaults to None for latest release. + """ client = release.ReleaseClient() download_path = self.__get_binary_path() download_path.mkdir(parents=True, exist_ok=True) - local_release = client.download_latest_release( + local_release = client.download_release( download_path, KASKADA_MANAGER_BIN_NAME_DEFAULT, KASKADA_ENGINE_BIN_NAME_DEFAULT, + engine_version, ) logger.debug(f"Download Path: {local_release._download_path}") logger.debug(f"Manager Path: {local_release._manager_path}") logger.debug(f"Engine Path: {local_release._engine_path}") # Update the binary path to the path downloaded and saved to by the latest release downloader. - self.bin_path( - local_release._download_path.absolute().relative_to( - Path(self._path).expanduser().absolute() - ) + full_path = local_release._download_path.absolute().relative_to( + Path(self._path).expanduser().absolute() ) + self.bin_path(str(full_path)) + os.chmod(local_release._manager_path, 0o755) os.chmod(local_release._engine_path, 0o755) @@ -322,7 +360,7 @@ def build(self) -> LocalSession: ) if self._download: - self.__download_latest_release() + self.__download_release(self._engine_version) manager_process, engine_process = self.__get_local_services() session = LocalSession( diff --git a/clients/python/src/kaskada/formatters.py b/clients/python/src/kaskada/formatters.py index 2a19024c8..2d869a3f8 100644 --- a/clients/python/src/kaskada/formatters.py +++ b/clients/python/src/kaskada/formatters.py @@ -58,14 +58,6 @@ def get_query_response_content(resp_obj): path = resp_obj.file_results.paths[i] appendChildIfNotNone(nested_table, tr(td(pre(i)), td(a(path, _href=path)))) resultsExist = True - elif hasattr(resp_obj, "redis_bulk"): - nested_table = table(_class="kda_table") - details.appendChild(html_table_row("redis_bulk", nested_table)) - nested_table.appendChild(tr(th("index"), th("path"))) - for i in range(len(resp_obj.redis_bulk.paths)): - path = resp_obj.redis_bulk.paths[i] - appendChildIfNotNone(nested_table, tr(td(pre(i)), td(a(path, _href=path)))) - resultsExist = True ( can_execute, @@ -689,13 +681,6 @@ def try_init(): html_formatter.for_type( "kaskada.materialization.MaterializationView", generic_object_html_formatter ) - html_formatter.for_type( - "kaskada.materialization.RedisAIDestination", generic_object_html_formatter - ) - html_formatter.for_type( - "kaskada.kaskada.v1alpha.materialization_service_pb2.RedisAI", - generic_object_html_formatter, - ) html_formatter.for_type( "kaskada.kaskada.v1alpha.materialization_service_pb2.Destination", 
generic_object_html_formatter, diff --git a/clients/python/src/kaskada/formatters_shared.py b/clients/python/src/kaskada/formatters_shared.py index a90405156..beada1808 100644 --- a/clients/python/src/kaskada/formatters_shared.py +++ b/clients/python/src/kaskada/formatters_shared.py @@ -123,20 +123,6 @@ def get_materialization_html_and_schema_df(obj): if hasattr(obj, "destination") and obj.HasField("destination"): destination = table(_class="kda_table") details.appendChild(html_table_row("destination", destination)) - if hasattr(obj.destination, "redis_a_i") and obj.destination.HasField( - "redis_a_i" - ): - redis_a_i = table(_class="kda_table") - appendHtmlObjTableRowIfAttrExists( - redis_a_i, obj.destination.redis_a_i, "host" - ) - appendHtmlObjTableRowIfAttrExists( - redis_a_i, obj.destination.redis_a_i, "port" - ) - appendHtmlObjTableRowIfAttrExists( - redis_a_i, obj.destination.redis_a_i, "db" - ) - destination.appendChild(html_table_row("redis_a_i", redis_a_i)) if hasattr(obj, "slice"): details.appendChild(html_table_row("slice", get_slice_request_html(obj.slice))) diff --git a/clients/python/src/kaskada/materialization.py b/clients/python/src/kaskada/materialization.py index cad04a835..1d6a0ae3c 100644 --- a/clients/python/src/kaskada/materialization.py +++ b/clients/python/src/kaskada/materialization.py @@ -23,44 +23,6 @@ def to_request(self) -> Dict[str, Any]: pass -class RedisDestination(Destination): - def __init__( - self, - host_name: str, - port: int, - use_tls: bool, - database_number: int, - password: str, - tls_cert: str, - tls_key: str, - tls_ca_cert: str, - insecure_skip_verify: bool, - ) -> None: - super().__init__() - self._host_name = host_name - self._port = port - self._use_tls = use_tls - self._database_number = database_number - self._password = password - self._tls_cert = tls_cert - self._tls_key = tls_key - self._tls_ca_cert = tls_ca_cert - self._insecure_skip_verify = insecure_skip_verify - - def to_request(self) -> Dict[str, Any]: - return { - "host_name": self._host_name, - "port": self._port, - "use_tls": self._use_tls, - "database_number": self._database_number, - "password": self._password, - "tls_cert": self._tls_cert, - "tls_key": self._tls_key, - "tls_ca_cert": self._tls_ca_cert, - "insecure_skip_verify": self._insecure_skip_verify, - } - - class FileType(Enum): FILE_TYPE_UNSPECIFIED = 0 FILE_TYPE_PARQUET = 1 @@ -160,8 +122,6 @@ def create_materialization( } if isinstance(destination, ObjectStoreDestination): materialization["destination"] = {"object_store": destination.to_request()} - elif isinstance(destination, RedisDestination): - materialization["destination"] = {"redis": destination.to_request()} elif isinstance(destination, PulsarDestination): materialization["destination"] = { "pulsar": {"config": destination.to_request()} diff --git a/clients/python/tests/api/test_session.py b/clients/python/tests/api/test_session.py index f53c4d112..a8e64ca02 100644 --- a/clients/python/tests/api/test_session.py +++ b/clients/python/tests/api/test_session.py @@ -78,6 +78,18 @@ def test_local_builder_defaults(): assert builder.engine_configs == {"--log-no-color": "1"} +def test_local_builder_set_engine_version(): + version = "engine@v0.0.1-beta.1" + builder = kaskada.api.session.LocalBuilder().engine_version(version) + assert builder._engine_version == version + +def test_local_builder_set_engine_version_throws(): + invalid_versions = ["0.0.1", "engine@0.0.1", "engine@v23", "manager@v1.1.1"] + + for version in invalid_versions: + with pytest.raises(ValueError): 
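+            # Each of these tags fails the `engine@v<semver>` format check, so engine_version() raises ValueError.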
+ builder = kaskada.api.session.LocalBuilder().engine_version(version) + def test_local_builder_set_path_sets_path(): path = "my_path" builder = kaskada.api.session.LocalBuilder().path(path) diff --git a/clients/python/tests/test_materialization.py b/clients/python/tests/test_materialization.py index 1dbfce673..04c537bb7 100644 --- a/clients/python/tests/test_materialization.py +++ b/clients/python/tests/test_materialization.py @@ -11,7 +11,6 @@ MaterializationView, ObjectStoreDestination, PulsarDestination, - RedisDestination, create_materialization, delete_materialization, get_materialization, @@ -21,23 +20,6 @@ from kaskada.slice_filters import EntityFilter -def test_redis_destination_to_request(): - params = { - "host_name": "my_host_name", - "port": 1234, - "use_tls": False, - "database_number": 4321, - "password": "my_password", - "tls_cert": "my_tls_cert", - "tls_key": "my_tls_key", - "tls_ca_cert": "my_tls_ca_cert", - "insecure_skip_verify": True, - } - - result = RedisDestination(**params) - assert result.to_request() == params - - def test_object_store_destination_to_request(): csv_file = FileType.FILE_TYPE_CSV output_prefix = "/my_prefix" @@ -218,7 +200,7 @@ def to_request(self) -> Dict[str, Any]: entity_keys: "my_entity_b" } } - + """ @@ -300,54 +282,6 @@ def test_create_materialization_object_store_parquet_destination(mockClient): ) -@patch("kaskada.client.Client") -def test_create_materialization_redis_destination(mockClient): - params = { - "host_name": "my_host_name", - "port": 1234, - "use_tls": False, - "database_number": 4321, - "password": "my_password", - "tls_cert": "my_tls_cert", - "tls_key": "my_tls_key", - "tls_ca_cert": "my_tls_ca_cert", - "insecure_skip_verify": True, - } - - redis_destination = RedisDestination(**params) - - name = "my_awkward_tacos" - expression = "last(tacos)" - destination = redis_destination - views = [MaterializationView("my_second_view", "last(awkward)")] - slice_filter = EntityFilter(["my_entity_a", "my_entity_b"]) - - expected_request = material_pb.CreateMaterializationRequest( - **{ - "materialization": { - "materialization_name": name, - "expression": expression, - "with_views": [ - {"name": "my_second_view", "expression": "last(awkward)"} - ], - "destination": {"redis": redis_destination.to_request()}, - "slice": slice_filter.to_request(), - } - } - ) - create_materialization( - name, - expression, - destination, - views, - slice_filter=slice_filter, - client=mockClient, - ) - mockClient.materialization_stub.CreateMaterialization.assert_called_with( - expected_request, metadata=mockClient.get_metadata() - ) - - @patch("kaskada.client.Client") def test_create_materialization_astra_streaming_destination(mockClient): params = { diff --git a/clients/timestreams/README.md b/clients/timestreams/README.md new file mode 100644 index 000000000..17eb7486e --- /dev/null +++ b/clients/timestreams/README.md @@ -0,0 +1,17 @@ +# Kaskada Timestreams + +## Developer Instructions +The package uses Poetry to develop and build. + +1. Install Pyenv [Pyenv Documentation](https://github.com/pyenv/pyenv) +1. Install Python 3.9.16: `$ pyenv install 3.9.16` +1. Install Poetry [Poetry Documentation](https://python-poetry.org/docs/) +1. 
Install dependencies: `$ poetry install` + +#### Build the Package +To build the client: `$ poetry build` + +#### Publishing the Package to PyPI +* build the package (see above) +* set the `POETRY_PYPI_TOKEN_PYPI` env var in your environment +* from the `./clients` folder, run `$ docker compose run push-timestreams` diff --git a/clients/timestreams/poetry.lock b/clients/timestreams/poetry.lock new file mode 100644 index 000000000..9fda33e82 --- /dev/null +++ b/clients/timestreams/poetry.lock @@ -0,0 +1,7 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +package = [] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8.2" +content-hash = "2fcdaa22403ff95bf777bfbeeea337c1a68e82a9a7598b18b0629813be440fdd" diff --git a/clients/timestreams/pyproject.toml b/clients/timestreams/pyproject.toml new file mode 100644 index 000000000..6c919784a --- /dev/null +++ b/clients/timestreams/pyproject.toml @@ -0,0 +1,14 @@ +[tool.poetry] +name = "timestreams" +version = "0.0.1a1" +description = "A placeholder for a future kaskada project" +authors = ["Kaskada "] +license = "Apache-2.0" +readme = "README.md" +packages = [ + { include = "timestreams", from = "src" }, +] +include = ["**/src/timestreams/timestreams/**/*.py"] + +[tool.poetry.dependencies] +python = "^3.8.2" diff --git a/clients/timestreams/src/timestreams/__init__.py b/clients/timestreams/src/timestreams/__init__.py new file mode 100644 index 000000000..fc9a4e5d4 --- /dev/null +++ b/clients/timestreams/src/timestreams/__init__.py @@ -0,0 +1,2 @@ +"""Kaskada Compute Engine +""" diff --git a/crates/sparrow-api/src/kaskada/v1alpha/schema_traits.rs b/crates/sparrow-api/src/kaskada/v1alpha/schema_traits.rs index 762a95a99..e444bb23e 100644 --- a/crates/sparrow-api/src/kaskada/v1alpha/schema_traits.rs +++ b/crates/sparrow-api/src/kaskada/v1alpha/schema_traits.rs @@ -24,7 +24,7 @@ impl DataType { let value = &fields[1]; Self { kind: Some(data_type::Kind::Map(Box::new(data_type::Map { - name: name.to_string(), + name: name.to_owned(), ordered, key_name: key.name.clone(), key_type: Some(Box::new( @@ -44,6 +44,22 @@ impl DataType { } } + pub fn new_list(name: &str, field: schema::Field) -> Self { + Self { + kind: Some(data_type::Kind::List(Box::new(data_type::List { + name: name.to_owned(), + item_type: Some(Box::new( + field + .data_type + .as_ref() + .expect("data type to exist") + .clone(), + )), + nullable: field.nullable, + }))), + } + } + pub fn new_primitive(primitive: data_type::PrimitiveType) -> Self { Self { kind: Some(data_type::Kind::Primitive(primitive as i32)), @@ -221,6 +237,21 @@ impl TryFrom<&arrow::datatypes::DataType> for DataType { Ok(DataType::new_map(s.name(), *is_ordered, vec![key, value])) } + arrow::datatypes::DataType::List(field) => { + let name = field.name(); + let field = schema::Field { + name: name.to_owned(), + data_type: Some(field.data_type().try_into().map_err( + |err: ConversionError| { + err.with_prepend_field("list item".to_owned()) + }, + )?), + nullable: field.is_nullable(), + }; + + Ok(DataType::new_list(name, field)) + } + unsupported => Err(ConversionError::new_unsupported(unsupported.clone())), } } @@ -337,12 +368,16 @@ impl TryFrom<&DataType> for arrow::datatypes::DataType { Some(data_type::Kind::Struct(schema)) => Ok(arrow::datatypes::DataType::Struct( fields_to_arrow(&schema.fields)?.into(), )), - Some(data_type::Kind::List(item_type)) => { - let item_type = arrow::datatypes::DataType::try_from(item_type.as_ref()) - .map_err(|e| e.with_prepend_field("list 
item".to_owned()))?; - let item_type = arrow::datatypes::Field::new("item", item_type, true); - Ok(arrow::datatypes::DataType::List(Arc::new(item_type))) - } + Some(data_type::Kind::List(list)) => match list.item_type.as_ref() { + Some(item_type) => { + let item_type = arrow::datatypes::DataType::try_from(item_type.as_ref()) + .map_err(|e| e.with_prepend_field("list item".to_owned()))?; + let item_type = + arrow::datatypes::Field::new(list.name.clone(), item_type, list.nullable); + Ok(arrow::datatypes::DataType::List(Arc::new(item_type))) + } + None => Err(ConversionError::new_unsupported(value.clone())), + }, Some(data_type::Kind::Map(map)) => { match (map.key_type.as_ref(), map.value_type.as_ref()) { (Some(key), Some(value)) => { diff --git a/crates/sparrow-arrow/src/batch.rs b/crates/sparrow-arrow/src/batch.rs index 67ffd7dc1..572060462 100644 --- a/crates/sparrow-arrow/src/batch.rs +++ b/crates/sparrow-arrow/src/batch.rs @@ -73,6 +73,15 @@ impl Batch { } } + /// Create a new `Batch` containing the given batch data. + /// + /// `time`, `subsort` and `key_hash` are references to the key columns. + /// They may be references to columns within the batch or not. + /// + /// `up_to_time` is a [RowTime] such that: + /// (a) all rows so far are less than or equal to `up_to_time` + /// (b) all rows in this batch or less than or equal to `up_to_time` + /// (c) all future rows are greater than `up_to_time`. pub fn new_with_data( batch: RecordBatch, time: ArrayRef, diff --git a/crates/sparrow-arrow/src/concat_take.rs b/crates/sparrow-arrow/src/concat_take.rs index 784616231..ad5f28b5a 100644 --- a/crates/sparrow-arrow/src/concat_take.rs +++ b/crates/sparrow-arrow/src/concat_take.rs @@ -10,6 +10,6 @@ pub fn concat_take( array2: &ArrayRef, indices: &UInt32Array, ) -> anyhow::Result { - let combined = arrow::compute::concat(&[array1, array2])?; - Ok(arrow::compute::take(&combined, indices, None)?) + let combined = arrow_select::concat::concat(&[array1, array2])?; + Ok(arrow_select::take::take(&combined, indices, None)?) } diff --git a/crates/sparrow-arrow/src/downcast.rs b/crates/sparrow-arrow/src/downcast.rs index ce0eabada..39b263ddc 100644 --- a/crates/sparrow-arrow/src/downcast.rs +++ b/crates/sparrow-arrow/src/downcast.rs @@ -2,8 +2,7 @@ use anyhow::Context; use arrow::array::{ - Array, BooleanArray, GenericStringArray, ListArray, OffsetSizeTrait, PrimitiveArray, - StructArray, + Array, BooleanArray, GenericStringArray, OffsetSizeTrait, PrimitiveArray, StructArray, }; use arrow::datatypes::ArrowPrimitiveType; use arrow_array::MapArray; @@ -24,13 +23,6 @@ pub fn downcast_primitive_array( }) } -pub fn downcast_list_array(array: &dyn Array) -> anyhow::Result<&ListArray> { - array - .as_any() - .downcast_ref::() - .with_context(|| format!("Unable to downcast {:?} to ListArray", array.data_type())) -} - /// Downcast an array into a string array. pub fn downcast_string_array(array: &dyn Array) -> anyhow::Result<&GenericStringArray> where diff --git a/crates/sparrow-arrow/src/hash.rs b/crates/sparrow-arrow/src/hash.rs index cf3e9e14e..5808d2651 100644 --- a/crates/sparrow-arrow/src/hash.rs +++ b/crates/sparrow-arrow/src/hash.rs @@ -1,96 +1,43 @@ //! Provides a kernel for hashing an arbitrary Arrow array to a UInt64Array. 
-use crate::downcast::{downcast_boolean_array, downcast_primitive_array, downcast_string_array}; -use anyhow::anyhow; -use arrow::array::{Array, OffsetSizeTrait, UInt64Array}; -use arrow::datatypes::{ - ArrowPrimitiveType, DataType, Int16Type, Int32Type, Int64Type, Int8Type, UInt16Type, - UInt32Type, UInt64Type, -}; +use std::cell::RefCell; + +use crate::hasher::Hasher; +use arrow::array::{Array, UInt64Array}; +use arrow::datatypes::DataType; pub fn can_hash(data_type: &DataType) -> bool { - matches!( - data_type, + match data_type { + primitive if primitive.is_primitive() => true, DataType::Null - | DataType::Boolean - | DataType::Int8 - | DataType::Int16 - | DataType::Int32 - | DataType::Int64 - | DataType::UInt16 - | DataType::UInt32 - | DataType::UInt64 - | DataType::Utf8 - | DataType::LargeUtf8 - ) -} - -/// Return an `ArrayRef` to a `UInt64Array` containing the hash of each row of -/// the array. -pub fn hash(array: &dyn Array) -> anyhow::Result<UInt64Array> { - match array.data_type() { - DataType::Null => { - // The null array contains only null values. Hash that to an array of 0. - Ok(UInt64Array::from(vec![0; array.len()])) - } - DataType::Boolean => hash_boolean(array), - DataType::Int8 => hash_primitive::<Int8Type>(array), - DataType::Int16 => hash_primitive::<Int16Type>(array), - DataType::Int32 => hash_primitive::<Int32Type>(array), - DataType::Int64 => hash_primitive::<Int64Type>(array), - DataType::UInt16 => hash_primitive::<UInt16Type>(array), - DataType::UInt32 => hash_primitive::<UInt32Type>(array), - DataType::UInt64 => hash_primitive::<UInt64Type>(array), - DataType::Utf8 => hash_string::<i32>(array), - DataType::LargeUtf8 => hash_string::<i64>(array), - todo => Err(anyhow!("Hashing of type {:?}", todo)), + | DataType::Boolean + | DataType::Utf8 + | DataType::LargeUtf8 + | DataType::Binary + | DataType::LargeBinary + | DataType::FixedSizeBinary(_) + | DataType::Decimal128(_, _) + | DataType::Decimal256(_, _) => true, + DataType::Dictionary(_, value) => can_hash(value), + DataType::Struct(fields) => fields.iter().all(|f| can_hash(f.data_type())), + _ => false, } } -fn fixed_seed_hasher() -> ahash::random_state::RandomState { - ahash::random_state::RandomState::with_seeds(1234, 5678, 9012, 3456) -} - -fn hash_string<T>(array: &dyn Array) -> anyhow::Result<UInt64Array> -where - T: OffsetSizeTrait, -{ - let string_array = downcast_string_array::<T>(array)?; - - let mut builder = UInt64Array::builder(array.len()); - - for string in string_array { - builder.append_value(fixed_seed_hasher().hash_one(string)); - } - - Ok(builder.finish()) +thread_local! { + /// Thread-local hasher. + /// + /// TODO: Move this to the hasher and make it easy to automatically + /// use this instance. + static HASHER: RefCell<Hasher> = RefCell::new(Hasher::default()); } -fn hash_primitive<T>(array: &dyn Array) -> anyhow::Result<UInt64Array> -where - T: ArrowPrimitiveType, - T::Native: std::hash::Hash, -{ - let primitive_array = downcast_primitive_array::<T>(array)?; - - let mut builder = UInt64Array::builder(array.len()); - - for primitive in primitive_array { - builder.append_value(fixed_seed_hasher().hash_one(primitive)); - } - - Ok(builder.finish()) -} - -fn hash_boolean(array: &dyn Array) -> anyhow::Result<UInt64Array> { - let boolean_array = downcast_boolean_array(array)?; - - let mut builder = UInt64Array::builder(array.len()); - - for boolean in boolean_array { - builder.append_value(fixed_seed_hasher().hash_one(boolean)); - } - - Ok(builder.finish()) +/// Return a `UInt64Array` containing the hash of each row of +/// the array. 
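+///
+/// Hashing goes through the thread-local [`Hasher`] above, so repeated calls
+/// on the same thread reuse a single hasher instance.
+///
+/// A sketch of intended usage (illustrative values, not from the tests):
+///
+/// ```ignore
+/// let array = arrow::array::Int64Array::from(vec![1, 2, 3]);
+/// let hashes = hash(&array)?; // one u64 hash per input row
+/// ```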
+pub fn hash(array: &dyn Array) -> error_stack::Result<UInt64Array, crate::hasher::Error> { + HASHER.with(|hasher| { + let mut hasher = hasher.borrow_mut(); + hasher.hash_to_uint64(array) + }) } #[cfg(test)] @@ -134,11 +81,7 @@ mod tests { let hashes = hash(&array).unwrap(); assert_eq!( hashes.values(), - &[ - 13572866306152653102, - 11832085162654999889, - 16979493163667785006 - ] + &[1472103086483932002, 0, 8057155968893317866] ); } } diff --git a/crates/sparrow-arrow/src/hasher.rs b/crates/sparrow-arrow/src/hasher.rs index f8790a938..44ae94da0 100644 --- a/crates/sparrow-arrow/src/hasher.rs +++ b/crates/sparrow-arrow/src/hasher.rs @@ -7,11 +7,12 @@ use arrow_array::cast::{ }; use arrow_array::types::{ArrowDictionaryKeyType, Decimal128Type, Decimal256Type}; use arrow_array::{ - downcast_dictionary_array, downcast_primitive_array, Array, ArrayAccessor, BooleanArray, - DictionaryArray, FixedSizeBinaryArray, + downcast_dictionary_array, downcast_primitive_array, Array, ArrayAccessor, ArrayRef, + BooleanArray, DictionaryArray, FixedSizeBinaryArray, UInt64Array, }; -use arrow_buffer::{i256, ArrowNativeType, NullBuffer}; +use arrow_buffer::{i256, ArrowNativeType, Buffer, NullBuffer}; use arrow_schema::DataType; +use error_stack::{IntoReport, ResultExt}; pub struct Hasher { random_state: RandomState, @@ -34,25 +35,33 @@ pub enum Error { NoArraysToHash, #[display(fmt = "hash of '{_0:?}' unsupported")] UnsupportedType(DataType), + #[display(fmt = "unable to create resulting UInt64Array")] + FailedToCreate, } impl error_stack::Context for Error {} impl Hasher { - pub fn hash_array( - &mut self, - array: impl std::borrow::Borrow<dyn Array>, - ) -> error_stack::Result<&[u64], Error> { - self.hash_arrays(&[array.borrow()]) + pub fn hash_to_uint64(&mut self, array: &dyn Array) -> error_stack::Result<UInt64Array, Error> { + let hashes = self.hash_array(array)?; + let hashes = Buffer::from_slice_ref(hashes); + UInt64Array::try_new(hashes.into(), None) + .into_report() + .change_context(Error::FailedToCreate) + } + + pub fn hash_array(&mut self, array: &dyn Array) -> error_stack::Result<&[u64], Error> { + self.hash_arrays(std::iter::once(array)) } - pub fn hash_arrays( + pub fn hash_arrays<'a>( &mut self, - arrays: &[impl std::borrow::Borrow<dyn Array>], + arrays: impl Iterator<Item = &'a dyn Array> + 'a, ) -> error_stack::Result<&[u64], Error> { - error_stack::ensure!(!arrays.is_empty(), Error::NoArraysToHash); + let mut arrays = arrays.peekable(); + error_stack::ensure!(arrays.peek().is_some(), Error::NoArraysToHash); - let num_rows = arrays[0].borrow().len(); + let num_rows = arrays.peek().unwrap().len(); self.hash_buffer.clear(); self.hash_buffer.resize(num_rows, 0); @@ -74,15 +83,14 @@ impl Hasher { /// The number of rows to hash is determined by `hashes_buffer.len()`. /// `hashes_buffer` should be pre-sized appropriately #[allow(clippy::ptr_arg)] -fn create_hashes( - arrays: &[impl std::borrow::Borrow<dyn Array>], +fn create_hashes<'a>( + arrays: impl Iterator<Item = &'a dyn Array> + 'a, random_state: &RandomState, hashes_buffer: &mut Vec<u64>, mask: Option<&NullBuffer>, mut multi_col: bool, ) -> error_stack::Result<(), Error> { - for col in arrays { - let array = col.borrow(); + for array in arrays { downcast_primitive_array! 
{ array => hash_array(array, random_state, hashes_buffer, mask, multi_col), DataType::Null => hash_null(random_state, hashes_buffer, multi_col), @@ -115,7 +123,7 @@ fn create_hashes( } let mask = NullBuffer::union(mask, array.nulls()); - create_hashes(array.columns(), random_state, hashes_buffer, mask.as_ref(), true)?; + create_hashes(iterate_array_refs(array.columns()), random_state, hashes_buffer, mask.as_ref(), true)?; } unsupported => { // This is internal because we should have caught this before. @@ -129,6 +137,10 @@ fn create_hashes( Ok(()) } +fn iterate_array_refs(arrays: &[ArrayRef]) -> impl Iterator<Item = &dyn Array> + '_ { + arrays.iter().map(|a| a.as_ref()) +} + #[inline] fn combine_hashes(l: u64, r: u64) -> u64 { let hash = (17 * 37u64).wrapping_add(l); @@ -260,7 +272,13 @@ fn hash_dictionary( // redundant hashing for large dictionary elements (e.g. strings) let values = array.values().clone(); let mut dict_hashes = vec![0; values.len()]; - create_hashes(&[values], random_state, &mut dict_hashes, None, false)?; + create_hashes( + [values.as_ref()].into_iter(), + random_state, + &mut dict_hashes, + None, + false, + )?; // combine hash for each index in values match (mask, multi_col) { diff --git a/crates/sparrow-backend/src/pipeline_schedule.rs b/crates/sparrow-backend/src/pipeline_schedule.rs index 324909e47..ed6930c79 100644 --- a/crates/sparrow-backend/src/pipeline_schedule.rs +++ b/crates/sparrow-backend/src/pipeline_schedule.rs @@ -94,6 +94,7 @@ mod tests { let steps = index_vec::index_vec![ // 0: scan table1 Step { + id: 0.into(), kind: StepKind::Scan { table_name: "table1".to_owned(), }, @@ -102,6 +103,7 @@ mod tests { }, // 1: scan table2 Step { + id: 1.into(), kind: StepKind::Scan { table_name: "table2".to_owned(), }, @@ -110,12 +112,14 @@ mod tests { }, // 2: merge 0 and 1 Step { + id: 2.into(), kind: StepKind::Merge, inputs: vec![0.into(), 1.into()], schema: schema.clone(), }, // 3: project 0 -> separate pipeline since 0 has 2 consumers Step { + id: 3.into(), kind: StepKind::Project { exprs: Exprs::empty(), }, @@ -124,6 +128,7 @@ mod tests { }, // 4: project 2 -> same pipeline since only consumer Step { + id: 4.into(), kind: StepKind::Project { exprs: Exprs::empty(), }, @@ -132,6 +137,7 @@ mod tests { }, // 5: merge 3 and 4 -> new pipeline since merge Step { + id: 5.into(), kind: StepKind::Merge, inputs: vec![3.into(), 4.into()], schema, diff --git a/crates/sparrow-catalog/catalog/add.toml b/crates/sparrow-catalog/catalog/add.toml index cd3017caa..2a40d249b 100644 --- a/crates/sparrow-catalog/catalog/add.toml +++ b/crates/sparrow-catalog/catalog/add.toml @@ -1,8 +1,8 @@ -name = 'add' -signature = 'add(a: number, b: number) -> number' -operator = 'a + b' -short_doc = 'Returns the sum of two numbers.' -long_doc = ''' +name = "add" +signature = "add(a: number, b: number) -> number" +operator = "a + b" +short_doc = "Returns the sum of two numbers." +long_doc = """ This is the function used for the binary operation `a + b`. ### Parameters @@ -16,28 +16,28 @@ following the [numeric type coercion rules](docs:data-model#numeric-type-coercio Returns a numeric column of the promoted numeric type compatible with both `a` and `b`. The result contains `null` if `a` or `b` was null at that row. Otherwise the row contains the sum of `a` and `b`. -''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Addition' -description = ''' +name = "Addition" +description = """ In this example, `a` is an integer column (defaulting to `i64`) and `b` is a floating point column (defaulting to `f64`). 
The result is a floating point column, achieved by implicitly converting `a` to `f64`. -''' -expression = 'Input.a + Input.b' -input_csv = ''' +""" +expression = "Input.a + Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,5,1.2 2021-01-02T00:00:00.000000000Z,A,6.3,0.4 2021-03-01T00:00:00.000000000Z,B,,3.7 2021-04-10T00:00:00.000000000Z,A,13, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,A,5.0,1.2,6.2 2021-01-02T00:00:00.000000000,A,6.3,0.4,6.7 2021-03-01T00:00:00.000000000,B,,3.7, 2021-04-10T00:00:00.000000000,A,13.0,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/add_time.toml b/crates/sparrow-catalog/catalog/add_time.toml index 7f28c7fd0..c04a4a6e4 100644 --- a/crates/sparrow-catalog/catalog/add_time.toml +++ b/crates/sparrow-catalog/catalog/add_time.toml @@ -1,7 +1,7 @@ -name = 'add_time' -signature = 'add_time(delta: timedelta, time: timestamp_ns) -> timestamp_ns' -short_doc = 'Adds a `timedelta` (duration or interval) to a time.' -long_doc = ''' +name = "add_time" +signature = "add_time(delta: timedelta, time: timestamp_ns) -> timestamp_ns" +short_doc = "Adds a `timedelta` (duration or interval) to a time." +long_doc = """ ### Parameters * delta: The time delta to add to the timestamp. See other [time functions](#time-functions) for how to create `timedelta`s. @@ -11,17 +11,17 @@ long_doc = ''' Returns a time column with each row containing the value of `time` for that row plus the given `delta`. If either the `delta` or `time` are `null` then the result is `null` in that row. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Adding a fixed number of days' -description = ''' +name = "Adding a fixed number of days" +description = """ This example uses [`days`](#days) to create a fixed `interval_days` to add to a given date. -''' -expression = 'Input.time | add_time(days(3))' -input_csv = ''' +""" +expression = "Input.time | add_time(days(3))" +input_csv = """ time,key 1996-03-21T00:00:00-00:00,Ben 1996-04-21T00:00:00-00:00,Ryan @@ -29,8 +29,8 @@ time,key 1996-06-21T00:00:00-00:00,Ryan 1996-07-21T00:00:00-00:00,Ben 1996-08-21T00:00:00-00:00,Ben -''' -output_csv = ''' +""" +output_csv = """ time,key,result 1996-03-21T00:00:00.000000000,Ben,1996-03-24T00:00:00.000000000 1996-04-21T00:00:00.000000000,Ryan,1996-04-24T00:00:00.000000000 @@ -38,4 +38,4 @@ time,key,result 1996-06-21T00:00:00.000000000,Ryan,1996-06-24T00:00:00.000000000 1996-07-21T00:00:00.000000000,Ben,1996-07-24T00:00:00.000000000 1996-08-21T00:00:00.000000000,Ben,1996-08-24T00:00:00.000000000 -''' +""" diff --git a/crates/sparrow-catalog/catalog/ceil.toml b/crates/sparrow-catalog/catalog/ceil.toml index a885de75c..e5cd0830d 100644 --- a/crates/sparrow-catalog/catalog/ceil.toml +++ b/crates/sparrow-catalog/catalog/ceil.toml @@ -1,7 +1,7 @@ -name = 'ceil' -signature = 'ceil(n: number) -> number' -short_doc = 'Rounds the number up to the next largest integer.' -long_doc = ''' +name = "ceil" +signature = "ceil(n: number) -> number" +short_doc = "Rounds the number up to the next largest integer." +long_doc = """ See also [`round`](#round) and [`floor`](#floor). ### Parameters @@ -14,23 +14,23 @@ Note: This method may be applied to any numeric type. For anything other than Returns a numeric column of the same type as `n`. The result contains `null` if `n` was null at that position. Otherwise, it contains the result of rounding `n` up to the next largest integer. 
-''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Ceil' -expression = 'Input.a | ceil()' -input_csv = ''' +name = "Ceil" +expression = "Input.a | ceil()" +input_csv = """ time,key,a 2021-01-01T00:00:00.000000000Z,A,5.7 2021-01-01T00:00:00.000000000Z,A,6.3 2021-01-02T00:00:00.000000000Z,B, 2021-01-02T00:00:00.000000000Z,B,-2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,a,result 2021-01-01T00:00:00.000000000,A,5.7,6.0 2021-01-01T00:00:00.000000000,A,6.3,7.0 2021-01-02T00:00:00.000000000,B,, 2021-01-02T00:00:00.000000000,B,-2.3,-2.0 -''' +""" diff --git a/crates/sparrow-catalog/catalog/clamp.toml b/crates/sparrow-catalog/catalog/clamp.toml index 8a480af6d..ff07a0f23 100644 --- a/crates/sparrow-catalog/catalog/clamp.toml +++ b/crates/sparrow-catalog/catalog/clamp.toml @@ -1,7 +1,7 @@ -name = 'clamp' -signature = 'clamp(value: number, min: number = null, max: number = null) -> number' -short_doc = 'Returns `value` clamped between the bounds `min` and `max`.' -long_doc = ''' +name = "clamp" +signature = "clamp(value: number, min: number = null, max: number = null) -> number" +short_doc = "Returns `value` clamped between the bounds `min` and `max`." +long_doc = """ ### Parameters * `value`: The value to be clamped. * `min`: The minimum bound. If `null`, no minimum bound will be applied. @@ -20,48 +20,48 @@ it contains `value` if `value` is between `min` and `max`, `min` if `value` is less than `min`, `max` if `value` is greater than `max`, and `null` if `value` is `null` or `min > max`. If `min` or `max` are null than no clamping on that side will be performed. -''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Clamp With Min and Max' -description = ''' +name = "Clamp With Min and Max" +description = """ This example shows the use of `clamp` with both a `min` and `max` value provided. -''' -expression = 'Input.a | clamp(min = 0.5, max = 9.5)' -input_csv = ''' +""" +expression = "Input.a | clamp(min = 0.5, max = 9.5)" +input_csv = """ time,key,a 2021-01-01T00:00:00.000000000Z,A,5.7 2021-01-01T00:00:00.000000000Z,A,6.3 2021-01-01T00:00:00.000000000Z,B, 2021-01-01T00:00:00.000000000Z,A, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,result 2021-01-01T00:00:00.000000000,A,5.7,5.7 2021-01-01T00:00:00.000000000,A,6.3,6.3 2021-01-01T00:00:00.000000000,B,, 2021-01-01T00:00:00.000000000,A,, -''' +""" [[examples]] -name = 'Clamp with Min' -description = ''' +name = "Clamp with Min" +description = """ This example shows the use of clamp with just a minimum bound. -''' -expression = 'Input.a | clamp(min = 0.5)' -input_csv = ''' +""" +expression = "Input.a | clamp(min = 0.5)" +input_csv = """ time,key,a 2021-01-01T00:00:00.000000000Z,A,5.7 2021-01-01T00:00:00.000000000Z,A,6.3 2021-01-01T00:00:00.000000000Z,B, 2021-01-01T00:00:00.000000000Z,A, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,result 2021-01-01T00:00:00.000000000,A,5.7,5.7 2021-01-01T00:00:00.000000000,A,6.3,6.3 2021-01-01T00:00:00.000000000,B,, 2021-01-01T00:00:00.000000000,A,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/coalesce.toml b/crates/sparrow-catalog/catalog/coalesce.toml index 3e92ffbdd..dd2148eb0 100644 --- a/crates/sparrow-catalog/catalog/coalesce.toml +++ b/crates/sparrow-catalog/catalog/coalesce.toml @@ -1,7 +1,7 @@ -name = 'coalesce' -signature = 'coalesce(values+: any) -> any' -short_doc = 'Return first non-`null` value or `null` if all values are `null`.' 
-long_doc = ''' +name = "coalesce" +signature = "coalesce(values+: any) -> any" +short_doc = "Return first non-`null` value or `null` if all values are `null`." +long_doc = """ ### Parameters * values: One or more values to be coalesced. Note that all of the values must be promotable to the same type. @@ -12,11 +12,11 @@ If all values are `null`, then returns `null`. The type of the result is the minimum type that all of the `values` were [promotable](docs:data-model#type-promotion-rules) to. -''' -tags = ['logical'] +""" +tags = ["logical"] [[examples]] -description = ''' +description = """ In this example we use `coalesce` to apply multiple conditions, almost like a `switch` statement. Each case uses [`if`](#if) to only pass through the cases where the condition is met. @@ -24,8 +24,8 @@ the cases where the condition is met. One thing to be aware of when using `coalesce` like this is that the first non-`null` is taken. Which means that even if a condition is met, if the corresponding value was `null`, it would move on to other conditions. -''' -expression = ''' +""" +expression = """ coalesce( # Tax exempt items Input.value | if(Input.tax_category == 'exempt'), @@ -34,18 +34,18 @@ coalesce( # Normal tax (10%) items Input.value * 1.1 ) -''' -input_csv = ''' +""" +input_csv = """ time,key,value,tax_category 2020-01-01T00:00:00.000000000Z,Ben,10.00,exempt 2020-01-02T00:00:00.000000000Z,Ben,12.00, 2020-01-02T01:00:00.000000000Z,Ryan,13.00,flat 2020-01-02T01:00:00.000000000Z,Ryan,,exempt -''' -output_csv = ''' +""" +output_csv = """ time,key,value,tax_category,result 2020-01-01T00:00:00.000000000,Ben,10.0,exempt,10.0 2020-01-02T00:00:00.000000000,Ben,12.0,,13.200000000000001 2020-01-02T01:00:00.000000000,Ryan,13.0,flat,14.0 2020-01-02T01:00:00.000000000,Ryan,,exempt, -''' +""" diff --git a/crates/sparrow-catalog/catalog/count.toml b/crates/sparrow-catalog/catalog/count.toml index 2ce3b0ed2..7cb4f637f 100644 --- a/crates/sparrow-catalog/catalog/count.toml +++ b/crates/sparrow-catalog/catalog/count.toml @@ -1,7 +1,7 @@ -name = 'count' -signature = 'count(input: any, window: window = null) -> u32' -short_doc = 'Counts each new, non-`null` value in the input.' -long_doc = ''' +name = "count" +signature = "count(input: any, window: window = null) -> u32" +short_doc = "Counts each new, non-`null` value in the input." +long_doc = """ ### Parameters * input: The input to be counted. * window: The window to aggregate within, as described in @@ -13,13 +13,13 @@ See [window functions](#window-functions) for how to specify the aggregation win For each input row, return the count of new, non-`null` rows in `input` up to and including the input row for the given entity. Returns `0` if there have been no such inputs. 
-''' -tags = ['aggregation'] +""" +tags = ["aggregation"] [[examples]] -name = 'Count' -expression = 'count(Input.value)' -input_csv = ''' +name = "Count" +expression = "count(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,50.7 2021-01-02T00:00:00.000000000Z,Ryan, @@ -27,8 +27,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,1.2 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-05T00:00:00.000000000Z,Ryan,2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,50.7,1 2021-01-02T00:00:00.000000000,Ryan,,0 @@ -36,4 +36,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,1.2,2 2021-01-04T00:00:00.000000000,Ben,,2 2021-01-05T00:00:00.000000000,Ryan,2.3,2 -''' +""" diff --git a/crates/sparrow-catalog/catalog/count_if.toml b/crates/sparrow-catalog/catalog/count_if.toml index 32be3bca3..6d6d7f29a 100644 --- a/crates/sparrow-catalog/catalog/count_if.toml +++ b/crates/sparrow-catalog/catalog/count_if.toml @@ -1,7 +1,7 @@ -name = 'count_if' -signature = 'count_if(input: any, window: window = null) -> u32' -short_doc = 'Counts each `true` value across in input.' -long_doc = ''' +name = "count_if" +signature = "count_if(input: any, window: window = null) -> u32" +short_doc = "Counts each `true` value across the input." +long_doc = """ ### Parameters * input: The input to be counted. * window: The window to aggregate within, as described in @@ -13,13 +13,13 @@ See [window functions](#window-functions) for how to specify the aggregation win For each input row, return the count of new rows containing `true` in `input` up to and including the input row for the given entity. Returns `0` if there have been no such inputs. -''' -tags = ['aggregation'] +""" +tags = ["aggregation"] [[examples]] -name = 'Count If' -expression = 'count_if(Input.value)' -input_csv = ''' +name = "Count If" +expression = "count_if(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,false 2021-01-02T00:00:00.000000000Z,Ryan,true @@ -27,8 +27,8 @@ time,key,value 2021-01-04T00:00:00.000000000Z,Ben,true 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-05T00:00:00.000000000Z,Ryan,false -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,false,0 2021-01-02T00:00:00.000000000,Ryan,true,1 @@ -36,4 +36,4 @@ time,key,value,result 2021-01-04T00:00:00.000000000,Ben,true,1 2021-01-04T00:00:00.000000000,Ben,,1 2021-01-05T00:00:00.000000000,Ryan,false,2 -''' +""" diff --git a/crates/sparrow-catalog/catalog/daily.toml b/crates/sparrow-catalog/catalog/daily.toml index d9a49fc26..16ff0efe6 100644 --- a/crates/sparrow-catalog/catalog/daily.toml +++ b/crates/sparrow-catalog/catalog/daily.toml @@ -1,7 +1,7 @@ -name = 'daily' -signature = 'daily() -> bool' -short_doc = 'A periodic function that produces a `true` value at the start of each calendar day (UTC).' -long_doc = ''' +name = "daily" +signature = "daily() -> bool" +short_doc = "A periodic function that produces a `true` value at the start of each calendar day (UTC)." +long_doc = """ This function is often used in aggregations to produce windows or as a predicate column. @@ -9,21 +9,21 @@ as a predicate column. Returns a boolean column with each row containing a `true` value at the start of the day, corresponding to time 00:00:00Z, and `null` at all other times. 
-''' -tags = ['tick'] +""" +tags = ["tick"] [[examples]] -name = 'Daily Aggregated Window' -description = ''' +name = "Daily Aggregated Window" +description = """ In this example, the `daily()` function is used as an argument to the [`since](#since) window function. The result is a windowed aggregation that resets daily. -''' -full_expression = ''' +""" +full_expression = """ { n: Input.n, daily_sum: sum(Input.n, window = since(daily())) } | extend({time: time_of($input), key: first(Input.key) }) -''' -input_csv = ''' +""" +input_csv = """ time,key,n 1996-12-19T04:00:00-00:00,Ben,1 1996-12-19T05:00:00-00:00,Ryan,2 @@ -31,31 +31,31 @@ time,key,n 1996-12-20T22:00:00-00:00,Ben,4 1996-12-21T03:00:00-00:00,Ryan,5 1996-12-21T07:00:00-00:00,Ben,6 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,daily_sum 1996-12-19T04:00:00.000000000,Ben,1,1 1996-12-19T05:00:00.000000000,Ryan,2,2 -1996-12-20T00:00:00.000000000,Ben,,1 1996-12-20T00:00:00.000000000,Ryan,,2 +1996-12-20T00:00:00.000000000,Ben,,1 1996-12-20T01:00:00.000000000,Ben,3,3 1996-12-20T22:00:00.000000000,Ben,4,7 -1996-12-21T00:00:00.000000000,Ben,,7 1996-12-21T00:00:00.000000000,Ryan,, +1996-12-21T00:00:00.000000000,Ben,,7 1996-12-21T03:00:00.000000000,Ryan,5,5 1996-12-21T07:00:00.000000000,Ben,6,6 -''' +""" [[examples]] -name = 'Filter Daily' -description = ''' +name = "Filter Daily" +description = """ In this example, the `daily()` function is used as an argument to the [`when`](#when) function, which filters input. The output includes the last input row before a [`tick`](#tick) occurs. -''' -full_expression = 'Input | last() | when(daily())' -input_csv = ''' +""" +full_expression = "Input | last() | when(daily())" +input_csv = """ time,key,n 1996-12-19T04:00:00-00:00,Ben,1 1996-12-19T05:00:00-00:00,Ryan,2 @@ -63,11 +63,11 @@ time,key,n 1996-12-20T22:00:00-00:00,Ben,4 1996-12-21T03:00:00-00:00,Ryan,5 1996-12-21T07:00:00-00:00,Ben,6 -''' -output_csv = ''' +""" +output_csv = """ time,key,n +1996-12-19T05:00:00.000000000,Ryan,2 1996-12-19T04:00:00.000000000,Ben,1 1996-12-19T05:00:00.000000000,Ryan,2 1996-12-20T22:00:00.000000000,Ben,4 -1996-12-19T05:00:00.000000000,Ryan,2 -''' +""" diff --git a/crates/sparrow-catalog/catalog/day_of_month.toml b/crates/sparrow-catalog/catalog/day_of_month.toml index a8061ef4e..6edb721cc 100644 --- a/crates/sparrow-catalog/catalog/day_of_month.toml +++ b/crates/sparrow-catalog/catalog/day_of_month.toml @@ -1,7 +1,7 @@ -name = 'day_of_month' -signature = 'day_of_month(time: timestamp_ns) -> u32' -short_doc = 'Return the day-of-month for the given time, starting with 1.' -long_doc = ''' +name = "day_of_month" +signature = "day_of_month(time: timestamp_ns) -> u32" +short_doc = "Return the day-of-month for the given time, starting with 1." +long_doc = """ ### Parameters * time: The timestamp to return the day-of-month for. @@ -9,13 +9,13 @@ long_doc = ''' Returns a `u32` column containing the day-of-month for each input `time`. Returns `null` for rows where `time` is `null`. The first day of the month is `1`. The result will be in the range 1 to 31 (inclusive). 
-''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Day of Month' -expression = 'day_of_month(Input.time)' -input_csv = ''' +name = "Day of Month" +expression = "day_of_month(Input.time)" +input_csv = """ time,key 1996-03-21T00:00:00-00:00,Ben 1996-04-21T00:00:00-00:00,Ryan @@ -23,8 +23,8 @@ time,key 1996-06-21T00:00:00-00:00,Ryan 1996-07-21T00:00:00-00:00,Ben 1996-08-21T00:00:00-00:00,Ben -''' -output_csv = ''' +""" +output_csv = """ time,key,result 1996-03-21T00:00:00.000000000,Ben,21 1996-04-21T00:00:00.000000000,Ryan,21 @@ -32,4 +32,4 @@ time,key,result 1996-06-21T00:00:00.000000000,Ryan,21 1996-07-21T00:00:00.000000000,Ben,21 1996-08-21T00:00:00.000000000,Ben,21 -''' +""" diff --git a/crates/sparrow-catalog/catalog/day_of_month0.toml b/crates/sparrow-catalog/catalog/day_of_month0.toml index 64ef0057e..2d4798a5d 100644 --- a/crates/sparrow-catalog/catalog/day_of_month0.toml +++ b/crates/sparrow-catalog/catalog/day_of_month0.toml @@ -1,7 +1,7 @@ -name = 'day_of_month0' -signature = 'day_of_month0(time: timestamp_ns) -> u32' -short_doc = 'Return the day-of-month for the given time, starting with 0.' -long_doc = ''' +name = "day_of_month0" +signature = "day_of_month0(time: timestamp_ns) -> u32" +short_doc = "Return the day-of-month for the given time, starting with 0." +long_doc = """ ### Parameters * time: The timestamp to return the day-of-month for. @@ -9,13 +9,13 @@ long_doc = ''' Returns a `u32` column containing the day-of-month for each input `time`. Returns `null` for rows where `time` is `null`. The first day of the month is `0`. The result will be in the range 0 to 30 (inclusive). -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Day of Month (Zero Based)' -expression = 'day_of_month0(Input.time)' -input_csv = ''' +name = "Day of Month (Zero Based)" +expression = "day_of_month0(Input.time)" +input_csv = """ time,key 1996-03-21T00:00:00-00:00,Ben 1996-04-21T00:00:00-00:00,Ryan @@ -23,8 +23,8 @@ time,key 1996-06-21T00:00:00-00:00,Ryan 1996-07-21T00:00:00-00:00,Ben 1996-08-21T00:00:00-00:00,Ben -''' -output_csv = ''' +""" +output_csv = """ time,key,result 1996-03-21T00:00:00.000000000,Ben,20 1996-04-21T00:00:00.000000000,Ryan,20 @@ -32,4 +32,4 @@ time,key,result 1996-06-21T00:00:00.000000000,Ryan,20 1996-07-21T00:00:00.000000000,Ben,20 1996-08-21T00:00:00.000000000,Ben,20 -''' +""" diff --git a/crates/sparrow-catalog/catalog/day_of_year.toml b/crates/sparrow-catalog/catalog/day_of_year.toml index 6e99d26e6..008ef879e 100644 --- a/crates/sparrow-catalog/catalog/day_of_year.toml +++ b/crates/sparrow-catalog/catalog/day_of_year.toml @@ -1,7 +1,7 @@ -name = 'day_of_year' -signature = 'day_of_year(time: timestamp_ns) -> u32' -short_doc = 'Return the day-of-year for the given time, starting with 1.' -long_doc = ''' +name = "day_of_year" +signature = "day_of_year(time: timestamp_ns) -> u32" +short_doc = "Return the day-of-year for the given time, starting with 1." +long_doc = """ ### Parameters * time: The timestamp to return the day-of-year for. @@ -9,13 +9,13 @@ long_doc = ''' Returns a `u32` column containing the day-of-year for each input `time`. Returns `null` for rows where `time` is `null`. The first day of the month is `1`. The result will be in the range 1 to 366 (inclusive). 
-''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Day Of Year' -expression = 'day_of_year(Input.time)' -input_csv = ''' +name = "Day Of Year" +expression = "day_of_year(Input.time)" +input_csv = """ time,key 1996-03-21T00:00:00-00:00,Ben 1996-04-21T00:00:00-00:00,Ryan @@ -23,8 +23,8 @@ time,key 1996-06-21T00:00:00-00:00,Ryan 1996-07-21T00:00:00-00:00,Ben 1996-08-21T00:00:00-00:00,Ben -''' -output_csv = ''' +""" +output_csv = """ time,key,result 1996-03-21T00:00:00.000000000,Ben,81 1996-04-21T00:00:00.000000000,Ryan,112 @@ -32,4 +32,4 @@ time,key,result 1996-06-21T00:00:00.000000000,Ryan,173 1996-07-21T00:00:00.000000000,Ben,203 1996-08-21T00:00:00.000000000,Ben,234 -''' +""" diff --git a/crates/sparrow-catalog/catalog/day_of_year0.toml b/crates/sparrow-catalog/catalog/day_of_year0.toml index ab5dee097..f7a99d71d 100644 --- a/crates/sparrow-catalog/catalog/day_of_year0.toml +++ b/crates/sparrow-catalog/catalog/day_of_year0.toml @@ -1,7 +1,7 @@ -name = 'day_of_year0' -signature = 'day_of_year0(time: timestamp_ns) -> u32' -short_doc = 'Return the day-of-year for the given time, starting with 0.' -long_doc = ''' +name = "day_of_year0" +signature = "day_of_year0(time: timestamp_ns) -> u32" +short_doc = "Return the day-of-year for the given time, starting with 0." +long_doc = """ ### Parameters * time: The timestamp to return the day-of-year for. @@ -9,13 +9,13 @@ long_doc = ''' Returns a `u32` column containing the day-of-year for each input `time`. Returns `null` for rows where `time` is `null`. The first day of the year is `0`. The result will be in the range 0 to 365 (inclusive). -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Day of Year (Zero Based)' -expression = 'day_of_year0(Input.time)' -input_csv = ''' +name = "Day of Year (Zero Based)" +expression = "day_of_year0(Input.time)" +input_csv = """ time,key 1996-03-21T00:00:00-00:00,Ben 1996-04-21T00:00:00-00:00,Ryan @@ -23,8 +23,8 @@ time,key 1996-06-21T00:00:00-00:00,Ryan 1996-07-21T00:00:00-00:00,Ben 1996-08-21T00:00:00-00:00,Ben -''' -output_csv = ''' +""" +output_csv = """ time,key,result 1996-03-21T00:00:00.000000000,Ben,80 1996-04-21T00:00:00.000000000,Ryan,111 @@ -32,4 +32,4 @@ time,key,result 1996-06-21T00:00:00.000000000,Ryan,172 1996-07-21T00:00:00.000000000,Ben,202 1996-08-21T00:00:00.000000000,Ben,233 -''' +""" diff --git a/crates/sparrow-catalog/catalog/days.toml b/crates/sparrow-catalog/catalog/days.toml index 3d2c40c99..2f6718007 100644 --- a/crates/sparrow-catalog/catalog/days.toml +++ b/crates/sparrow-catalog/catalog/days.toml @@ -1,7 +1,7 @@ -name = 'days' -signature = 'days(days: i64) -> interval_days' -short_doc = 'Produces an interval corresponding to the given number of calendar days.' -long_doc = ''' +name = "days" +signature = "days(days: i64) -> interval_days" +short_doc = "Produces an interval corresponding to the given number of calendar days." +long_doc = """ ### Parameters * days: The number of days to create the interval for. @@ -10,17 +10,17 @@ Returns an `interval_days` column with each row containing the value of `days` converted to an interval with the corresponding number of days. Rows where `days` is `null`, less than `0` or greater than `i32::MAX` will be `null`. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Adding a variable number of days' -description = ''' +name = "Adding a variable number of days" +description = """ This example uses [`add_time`](#add-time) to add the created interval to the `time` column. 
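As the description notes, `days(n)` composes with `add_time`. A small Python sketch of the documented null rules, where `days` and `add_time` are stand-ins for the catalog functions, not their implementations:

```python
from datetime import datetime, timedelta
from typing import Optional

I32_MAX = 2**31 - 1

def days(n: Optional[int]) -> Optional[timedelta]:
    # Null when n is null, negative, or above i32::MAX, per the long_doc.
    return None if n is None or n < 0 or n > I32_MAX else timedelta(days=n)

def add_time(time: Optional[datetime], interval: Optional[timedelta]) -> Optional[datetime]:
    # Null propagates from either argument.
    return None if time is None or interval is None else time + interval

assert add_time(datetime(1996, 3, 21), days(1)) == datetime(1996, 3, 22)
assert add_time(datetime(1996, 6, 21), days(None)) is None  # the null row above
```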
-''' -expression = 'Input.time | add_time(days(Input.n))' -input_csv = ''' +""" +expression = "Input.time | add_time(days(Input.n))" +input_csv = """ time,key,n 1996-03-21T00:00:00-00:00,Ben,1 1996-04-21T00:00:00-00:00,Ryan,2 @@ -28,8 +28,8 @@ time,key,n 1996-06-21T00:00:00-00:00,Ryan, 1996-07-21T00:00:00-00:00,Ben,2 1996-08-21T00:00:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,result 1996-03-21T00:00:00.000000000,Ben,1,1996-03-22T00:00:00.000000000 1996-04-21T00:00:00.000000000,Ryan,2,1996-04-23T00:00:00.000000000 @@ -37,4 +37,4 @@ time,key,n,result 1996-06-21T00:00:00.000000000,Ryan,, 1996-07-21T00:00:00.000000000,Ben,2,1996-07-23T00:00:00.000000000 1996-08-21T00:00:00.000000000,Ben,1,1996-08-22T00:00:00.000000000 -''' +""" diff --git a/crates/sparrow-catalog/catalog/days_between.toml b/crates/sparrow-catalog/catalog/days_between.toml index 95955ff25..1771ef47e 100644 --- a/crates/sparrow-catalog/catalog/days_between.toml +++ b/crates/sparrow-catalog/catalog/days_between.toml @@ -1,7 +1,7 @@ -name = 'days_between' -signature = 'days_between(t1: timestamp_ns, t2: timestamp_ns) -> interval_days' -short_doc = 'Returns the number of days between the first and second timestamp.' -long_doc = ''' +name = "days_between" +signature = "days_between(t1: timestamp_ns, t2: timestamp_ns) -> interval_days" +short_doc = "Returns the number of days between the first and second timestamp." +long_doc = """ ### Parameters * t1: The first timestamp * t2: The second timestamp @@ -14,17 +14,17 @@ rounded towards zero. In rows where `t1` or `t2` are `null`, the result will be `null`. If `t1` is before `t2`, the result will be positive. If `t1` is after `t2` the result will be negative. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Days Between' -description = ''' +name = "Days Between" +description = """ Note that the expression uses `as i32` to convert the `interval_days` to the integer number of days. This discards the units. -''' -expression = 'days_between(Input.time, Input.date) as i32' -input_csv = ''' +""" +expression = "days_between(Input.time, Input.date) as i32" +input_csv = """ time,key,date 1996-03-21T00:00:00-00:00,Ben,1996-08-19T00:00:00-00:00 1996-04-21T00:00:00-00:00,Ryan,1995-07-20T00:00:00-00:00 @@ -32,8 +32,8 @@ time,key,date 1996-06-21T00:00:00-00:00,Ryan,1996-06-19T05:00:00-00:00 1996-07-21T00:00:00-00:00,Ben, 1996-08-21T00:00:00-00:00,Ben,1996-08-22T00:00:00-00:00 -''' -output_csv = ''' +""" +output_csv = """ time,key,date,result 1996-03-21T00:00:00.000000000,Ben,1996-08-19T00:00:00.000000000,151 1996-04-21T00:00:00.000000000,Ryan,1995-07-20T00:00:00.000000000,-276 @@ -41,4 +41,4 @@ time,key,date,result 1996-06-21T00:00:00.000000000,Ryan,1996-06-19T05:00:00.000000000,-1 1996-07-21T00:00:00.000000000,Ben,, 1996-08-21T00:00:00.000000000,Ben,1996-08-22T00:00:00.000000000,1 -''' +""" diff --git a/crates/sparrow-catalog/catalog/div.toml b/crates/sparrow-catalog/catalog/div.toml index 78a99217e..54202a509 100644 --- a/crates/sparrow-catalog/catalog/div.toml +++ b/crates/sparrow-catalog/catalog/div.toml @@ -1,8 +1,8 @@ -name = 'div' -signature = 'div(a: number, b: number) -> number' -operator = 'a / b' -short_doc = 'Returns the division of two numbers.' -long_doc = ''' +name = "div" +signature = "div(a: number, b: number) -> number" +operator = "a / b" +short_doc = "Returns the division of two numbers." +long_doc = """ This is the function used for the binary operation `a / b`. 
### Parameters @@ -16,25 +16,25 @@ following the [numeric type coercion rules](docs:data-model#numeric-type-coercio Returns a numeric column of the promoted numeric type compatible with both `a` and `b`. The result contains `null` if `a` or `b` was null at that row, or if `b` was `0`. Otherwise the row contains the resulting of dividing `a` by `b`. -''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Division' -expression = 'Input.a / Input.b' -input_csv = ''' +name = "Division" +expression = "Input.a / Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,5.7,1.2 2021-01-02T00:00:00.000000000Z,A,6.3,0.4 2021-01-03T00:00:00.000000000Z,B,,3.7 2021-01-03T00:00:00.000000000Z,A,13.2, 2021-01-04T00:00:00.000000000Z,A,12.2,0 -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,A,5.7,1.2,4.75 2021-01-02T00:00:00.000000000,A,6.3,0.4,15.749999999999998 2021-01-03T00:00:00.000000000,B,,3.7, 2021-01-03T00:00:00.000000000,A,13.2,, 2021-01-04T00:00:00.000000000,A,12.2,0.0, -''' +""" diff --git a/crates/sparrow-catalog/catalog/else.toml b/crates/sparrow-catalog/catalog/else.toml index f7446519c..465680f55 100644 --- a/crates/sparrow-catalog/catalog/else.toml +++ b/crates/sparrow-catalog/catalog/else.toml @@ -1,7 +1,7 @@ -name = 'else' -signature = 'else(default: any, value: any) -> any' -short_doc = 'Return the `value` if it is non-`null`, `default` otherwise.' -long_doc = ''' +name = "else" +signature = "else(default: any, value: any) -> any" +short_doc = "Return the `value` if it is non-`null`, `default` otherwise." +long_doc = """ ### Parameters * default: The result to use if `value` is `null`. * value: The result to prefer if it is non-`null`. @@ -14,52 +14,52 @@ it with the pipe syntax to provide default values, as in For each row, returns `value` if it is non-`null` in that row, or `default` if `value is `null`. -''' -tags = ['logical'] +""" +tags = ["logical"] [[examples]] -name = 'Choosing between two values' -description = ''' +name = "Choosing between two values" +description = """ In this example the result is `Input.a` if it is non-`null`, and `Input.b` otherwise. This may be combined with [`if`](#if) to conditionaly `null` out cases to implement various logical operations. When chaining multiple conditionals, it may be better to use [`coalesce`](#coalesce). -''' -expression = 'Input.a | else(Input.b)' -input_csv = ''' +""" +expression = "Input.a | else(Input.b)" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,X,57.8,63 2021-01-02T00:00:00.000000000Z,Y,,86.3 2021-01-03T00:00:00.000000000Z,X,6873, 2021-01-04T00:00:00.000000000Z,X,, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,X,57.8,63.0,57.8 2021-01-02T00:00:00.000000000,Y,,86.3,86.3 2021-01-03T00:00:00.000000000,X,6873.0,,6873.0 2021-01-04T00:00:00.000000000,X,,, -''' +""" [[examples]] -name = 'Providing a default value' -description = ''' +name = "Providing a default value" +description = """ This example shows how to use `else` to provide a default value for a possibly `null` value. 
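A sketch of the defaulting behavior described here, ahead of the expression below; `else_` is a hypothetical stand-in for the catalog's `else` (Python reserves the name), and the values are the example's `a` column:

```python
from typing import Optional, TypeVar

T = TypeVar("T")

def else_(default: T, value: Optional[T]) -> T:
    # Prefer the value when present, otherwise fall back to the default.
    return default if value is None else value

# Mirrors `Input.a | else(42.0)` from the example below.
rows = [57.8, None, 6873.0, None]
assert [else_(42.0, a) for a in rows] == [57.8, 42.0, 6873.0, 42.0]
```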
-''' -expression = 'Input.a | else(42.0)' -input_csv = ''' +""" +expression = "Input.a | else(42.0)" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,X,57.8,63 2021-01-02T00:00:00.000000000Z,Y,,86.3 2021-01-03T00:00:00.000000000Z,X,6873, 2021-01-04T00:00:00.000000000Z,X,, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,X,57.8,63.0,57.8 2021-01-02T00:00:00.000000000,Y,,86.3,42.0 2021-01-03T00:00:00.000000000,X,6873.0,,6873.0 2021-01-04T00:00:00.000000000,X,,,42.0 -''' +""" diff --git a/crates/sparrow-catalog/catalog/eq.toml b/crates/sparrow-catalog/catalog/eq.toml index e0fd8b129..2834054a7 100644 --- a/crates/sparrow-catalog/catalog/eq.toml +++ b/crates/sparrow-catalog/catalog/eq.toml @@ -1,8 +1,8 @@ -name = 'eq' -signature = 'eq(a: any, b: any) -> bool' -operator = 'a == b' -short_doc = 'Return `true` if `a` is equal to `b`.' -long_doc = ''' +name = "eq" +signature = "eq(a: any, b: any) -> bool" +operator = "a == b" +short_doc = "Return `true` if `a` is equal to `b`." +long_doc = """ This is the function used for the binary comparison `a == b`. ### Parameters @@ -17,13 +17,13 @@ they may be promoted to a compatible numeric type following the Returns a `bool` column indicating the results. For each row, it contains `null` if `a` or `b` are `null`, `true` if they are equal and `false` if they are not equal. -''' -tags = ['comparison'] +""" +tags = ["comparison"] [[examples]] -name = 'Equals' -expression = 'Input.a == Input.b' -input_csv = ''' +name = "Equals" +expression = "Input.a == Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,Ben,50.7,6.0 2021-01-02T00:00:00.000000000Z,Ryan,,70 @@ -32,8 +32,8 @@ time,key,a,b 2021-01-05T00:00:00.000000000Z,Ben,65, 2021-01-06T00:00:00.000000000Z,Jordan,2.3,68.7 2021-01-07T00:00:00.000000000Z,Ryan,, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,Ben,50.7,6.0,false 2021-01-02T00:00:00.000000000,Ryan,,70.0, @@ -42,4 +42,4 @@ time,key,a,b,result 2021-01-05T00:00:00.000000000,Ben,65.0,, 2021-01-06T00:00:00.000000000,Jordan,2.3,68.7,false 2021-01-07T00:00:00.000000000,Ryan,,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/exp.toml b/crates/sparrow-catalog/catalog/exp.toml index 2f1dad94a..7f5b6b66b 100644 --- a/crates/sparrow-catalog/catalog/exp.toml +++ b/crates/sparrow-catalog/catalog/exp.toml @@ -1,7 +1,7 @@ -name = 'exp' -signature = 'exp(power: f64) -> f64' -short_doc = 'Returns `e^power`.' -long_doc = ''' +name = "exp" +signature = "exp(power: f64) -> f64" +short_doc = "Returns `e^power`." +long_doc = """ ### Parameters * power: The power to raise `e` to. @@ -12,21 +12,21 @@ Other numbers will be implicitly promoted. Returns a column of `f64` values. Each row contains `null` if `power` is `null`. Otherwise, the row contains the value `e ^ power`. 
-''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Exponential' -expression = 'exp(Input.a)' -input_csv = ''' +name = "Exponential" +expression = "exp(Input.a)" +input_csv = """ time,key,a 2021-01-01T00:00:00.000000000Z,A,5.7 2021-01-02T00:00:00.000000000Z,A,6.3 2021-01-02T00:00:00.000000000Z,B, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,result 2021-01-01T00:00:00.000000000,A,5.7,298.8674009670603 2021-01-02T00:00:00.000000000,A,6.3,544.571910125929 2021-01-02T00:00:00.000000000,B,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/extend.toml b/crates/sparrow-catalog/catalog/extend.toml index 474749d4a..ebdce3e59 100644 --- a/crates/sparrow-catalog/catalog/extend.toml +++ b/crates/sparrow-catalog/catalog/extend.toml @@ -1,7 +1,7 @@ -name = 'extend' -signature = 'extend(new, old) -> extended' -short_doc = 'Extends a record with fields from another.' -long_doc = ''' +name = "extend" +signature = "extend(new, old) -> extended" +short_doc = "Extends a record with fields from another." +long_doc = """ ### Parameters * new: The record column containing the new fields. * old: The record column containing the old fields. @@ -13,25 +13,25 @@ a way to add fields to the `old` record. Returns a column containing the combined record fields from both `old` and `new`. If either `old` or `new` are `null` then the fields from the given record are `null`. If a field exists in both `old` and `new`, the value from `new` is preferred. -''' -tags = ['record'] +""" +tags = ["record"] [[examples]] -name = 'Record Extension' -full_expression = ''' +name = "Record Extension" +full_expression = """ extend(Input, { sum: Input.a + Input.b, five: 5 }) -''' -input_csv = ''' +""" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,5,1.2 2021-01-02T00:00:00.000000000Z,A,6.3,0.4 2021-03-01T00:00:00.000000000Z,B,,3.7 2021-04-10T00:00:00.000000000Z,A,13, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,sum,five 2021-01-01T00:00:00.000000000,A,5.0,1.2,6.2,5 2021-01-02T00:00:00.000000000,A,6.3,0.4,6.7,5 2021-03-01T00:00:00.000000000,B,,3.7,,5 2021-04-10T00:00:00.000000000,A,13.0,,,5 -''' +""" diff --git a/crates/sparrow-catalog/catalog/first.toml b/crates/sparrow-catalog/catalog/first.toml index ed1351bb0..96916ee8f 100644 --- a/crates/sparrow-catalog/catalog/first.toml +++ b/crates/sparrow-catalog/catalog/first.toml @@ -1,7 +1,7 @@ -name = 'first' -signature = 'first(input: any, window: window = null) -> any' -short_doc = 'Computes the first value present across the input.' -long_doc = ''' +name = "first" +signature = "first(input: any, window: window = null) -> any" +short_doc = "Computes the first value present across the input." +long_doc = """ ### Parameters * input: The input to be considered. * window: The window to aggregate within, as described in @@ -15,13 +15,13 @@ up to and including the current row. Returns `null` until there has been at least one such input. NOTE: The first value is inclusive of any values at the current time. 
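A rough Python sketch of the running `first` aggregation described above, assuming the un-windowed case over (key, value) rows in time order:

```python
from typing import Any, Dict, Iterable, List, Optional, Tuple

def first(rows: Iterable[Tuple[str, Optional[Any]]]) -> List[Optional[Any]]:
    # Remember the earliest non-null value per key; later rows and nulls
    # never overwrite it, and a key reads as null until a value arrives.
    seen: Dict[str, Any] = {}
    out: List[Optional[Any]] = []
    for key, value in rows:
        if key not in seen and value is not None:
            seen[key] = value
        out.append(seen.get(key))
    return out

rows = [("Ben", 50.7), ("Ryan", None), ("Ryan", 67.2),
        ("Ben", 1.2), ("Ben", None), ("Ryan", 2.3)]
assert first(rows) == [50.7, None, 67.2, 50.7, 50.7, 67.2]  # matches the example
```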
-''' -tags = ['aggregation'] +""" +tags = ["aggregation"] [[examples]] -name = 'First' -expression = 'first(Input.value)' -input_csv = ''' +name = "First" +expression = "first(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,50.7 2021-01-02T00:00:00.000000000Z,Ryan, @@ -29,8 +29,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,1.2 2021-01-03T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,50.7,50.7 2021-01-02T00:00:00.000000000,Ryan,, @@ -38,4 +38,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,1.2,50.7 2021-01-03T00:00:00.000000000,Ben,,50.7 2021-01-04T00:00:00.000000000,Ryan,2.3,67.2 -''' +""" diff --git a/crates/sparrow-catalog/catalog/floor.toml b/crates/sparrow-catalog/catalog/floor.toml index 1f78e5e0f..f9a14ebe6 100644 --- a/crates/sparrow-catalog/catalog/floor.toml +++ b/crates/sparrow-catalog/catalog/floor.toml @@ -1,7 +1,7 @@ -name = 'floor' -signature = 'floor(n: number) -> number' -short_doc = 'Rounds the number down to the next smallest integer.' -long_doc = ''' +name = "floor" +signature = "floor(n: number) -> number" +short_doc = "Rounds the number down to the next smallest integer." +long_doc = """ See also [`round`](#round) and [`ceil`](#ceil). ### Parameters @@ -14,23 +14,23 @@ Note: This method may be applied to any numeric type. For anything other than Returns a numeric column of the same type as `n`. The result contains `null` if `n` was null at that row. Otherwise, it contains the result of rounding `n` down to the next smallest integer. -''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Floor' -expression = 'Input.a | floor()' -input_csv = ''' +name = "Floor" +expression = "Input.a | floor()" +input_csv = """ time,key,a 2021-01-01T00:00:00.000000000Z,A,5.7 2021-01-01T00:00:00.000000000Z,A,6.3 2021-01-02T00:00:00.000000000Z,B, 2021-01-02T00:00:00.000000000Z,B,-2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,a,result 2021-01-01T00:00:00.000000000,A,5.7,5.0 2021-01-01T00:00:00.000000000,A,6.3,6.0 2021-01-02T00:00:00.000000000,B,, 2021-01-02T00:00:00.000000000,B,-2.3,-3.0 -''' +""" diff --git a/crates/sparrow-catalog/catalog/gt.toml b/crates/sparrow-catalog/catalog/gt.toml index 5449d9427..e8825de2f 100644 --- a/crates/sparrow-catalog/catalog/gt.toml +++ b/crates/sparrow-catalog/catalog/gt.toml @@ -1,8 +1,8 @@ -name = 'gt' -signature = 'gt(a: ordered, b: ordered) -> bool' -operator = 'a > b' -short_doc = 'Return `true` if `a` is greater than `b`.' -long_doc = ''' +name = "gt" +signature = "gt(a: ordered, b: ordered) -> bool" +operator = "a > b" +short_doc = "Return `true` if `a` is greater than `b`." +long_doc = """ This is the function used for the binary comparison `a > b`. ### Parameters @@ -17,13 +17,13 @@ they may be promoted to a compatible numeric type following the Returns a `bool` column indicating the results. For each row, it contains `null` if `a` or `b` are `null`, `true` if `a` is greater than `b`, and `false` if `a` is less than or equal to `b`. 
-''' -tags = ['comparison'] +""" +tags = ["comparison"] [[examples]] -name = 'Greater Than' -expression = 'Input.a > Input.b' -input_csv = ''' +name = "Greater Than" +expression = "Input.a > Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,Ben,50.7,6.0 2021-01-02T00:00:00.000000000Z,Ryan,,70 @@ -32,8 +32,8 @@ time,key,a,b 2021-01-05T00:00:00.000000000Z,Ben,65, 2021-01-06T00:00:00.000000000Z,Jordan,2.3,68.7 2021-01-07T00:00:00.000000000Z,Ryan,, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,Ben,50.7,6.0,true 2021-01-02T00:00:00.000000000,Ryan,,70.0, @@ -42,4 +42,4 @@ time,key,a,b,result 2021-01-05T00:00:00.000000000,Ben,65.0,, 2021-01-06T00:00:00.000000000,Jordan,2.3,68.7,false 2021-01-07T00:00:00.000000000,Ryan,,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/gte.toml b/crates/sparrow-catalog/catalog/gte.toml index 4b971e98c..100f874f7 100644 --- a/crates/sparrow-catalog/catalog/gte.toml +++ b/crates/sparrow-catalog/catalog/gte.toml @@ -1,8 +1,8 @@ -name = 'gte' -signature = 'gte(a: ordered, b: ordered) -> bool' -operator = 'a >= b' -short_doc = 'Return `true` if `a` is greater than or equal to `b`.' -long_doc = ''' +name = "gte" +signature = "gte(a: ordered, b: ordered) -> bool" +operator = "a >= b" +short_doc = "Return `true` if `a` is greater than or equal to `b`." +long_doc = """ This is the function used for the binary comparison `a >= b`. ### Parameters @@ -17,13 +17,13 @@ they may be promoted to a compatible numeric type following the Returns a `bool` column indicating the results. For each row, it contains `null` if `a` or `b` are `null`, `true` if `a` is greater than or equal to `b`, and `false` if `a` is less than `b`. -''' -tags = ['comparison'] +""" +tags = ["comparison"] [[examples]] -name = 'Greater Than or Equal To' -expression = 'Input.a >= Input.b' -input_csv = ''' +name = "Greater Than or Equal To" +expression = "Input.a >= Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,Ben,50.7,6.0 2021-01-02T00:00:00.000000000Z,Ryan,,70 @@ -32,8 +32,8 @@ time,key,a,b 2021-01-05T00:00:00.000000000Z,Ben,65, 2021-01-06T00:00:00.000000000Z,Jordan,2.3,68.7 2021-01-07T00:00:00.000000000Z,Ryan,, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,Ben,50.7,6.0,true 2021-01-02T00:00:00.000000000,Ryan,,70.0, @@ -42,4 +42,4 @@ time,key,a,b,result 2021-01-05T00:00:00.000000000,Ben,65.0,, 2021-01-06T00:00:00.000000000,Jordan,2.3,68.7,false 2021-01-07T00:00:00.000000000,Ryan,,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/hash.toml b/crates/sparrow-catalog/catalog/hash.toml index 122b970e8..94a583230 100644 --- a/crates/sparrow-catalog/catalog/hash.toml +++ b/crates/sparrow-catalog/catalog/hash.toml @@ -1,7 +1,7 @@ -name = 'hash' -signature = 'hash(input: key) -> u64' -short_doc = 'Returns the hash of the `input`.' -long_doc = ''' +name = "hash" +signature = "hash(input: key) -> u64" +short_doc = "Returns the hash of the `input`." +long_doc = """ ### Parameters * input: The argument to hash. @@ -11,13 +11,13 @@ Returns a `u64` column which contains the hash of the `input`. Note: Unlike many functions which return `null` if any of their arguments are `null`, `hash` will never return `null`. 
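The "never returns `null`" note means `hash` is a total function: nulls hash to some fixed value rather than propagating. A loose Python sketch of that contract only; the sentinel and the hash function are invented here and are not Sparrow's actual hashes (which this very diff shows changing):

```python
from typing import Hashable, Optional

def hash_u64(value: Optional[Hashable]) -> int:
    # Total function: null maps to an arbitrary fixed sentinel, never to null.
    if value is None:
        return 0x9E3779B97F4A7C15  # illustrative constant only
    return hash(value) & 0xFFFFFFFFFFFFFFFF  # truncate to a u64

assert hash_u64("hi") == hash_u64("hi")   # equal inputs hash equally
assert hash_u64(None) == hash_u64(None)   # null has a stable, non-null hash
```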
-''' -tags = ['misc'] +""" +tags = ["misc"] [[examples]] -name = 'String Hash' -expression = 'hash(Input.value)' -input_csv = ''' +name = "String Hash" +expression = "hash(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,hello 2021-01-01T00:00:00.000000000Z,Ryan, @@ -25,21 +25,21 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,hi 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,earth -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result -2021-01-01T00:00:00.000000000,Ben,hello,13572866306152653102 -2021-01-01T00:00:00.000000000,Ryan,,8429509363638065888 -2021-01-02T00:00:00.000000000,Ryan,world,16979493163667785006 -2021-01-03T00:00:00.000000000,Ben,hi,16532275944129373820 -2021-01-04T00:00:00.000000000,Ben,,8429509363638065888 -2021-01-04T00:00:00.000000000,Ryan,earth,9400903662585293146 -''' +2021-01-01T00:00:00.000000000,Ben,hello,1472103086483932002 +2021-01-01T00:00:00.000000000,Ryan,,5663277146615294718 +2021-01-02T00:00:00.000000000,Ryan,world,8057155968893317866 +2021-01-03T00:00:00.000000000,Ben,hi,2460612554838835252 +2021-01-04T00:00:00.000000000,Ben,,5663277146615294718 +2021-01-04T00:00:00.000000000,Ryan,earth,14489671231712828724 +""" [[examples]] -name = 'Integer Hash' -expression = 'hash(Input.value)' -input_csv = ''' +name = "Integer Hash" +expression = "hash(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,5 2021-01-01T00:00:00.000000000Z,Ryan,8 @@ -47,13 +47,13 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,8 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,9 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result -2021-01-01T00:00:00.000000000,Ben,5,10021492687541564645 -2021-01-01T00:00:00.000000000,Ryan,8,4864632034659211723 -2021-01-02T00:00:00.000000000,Ryan,9,2122274938272070218 -2021-01-03T00:00:00.000000000,Ben,8,4864632034659211723 -2021-01-04T00:00:00.000000000,Ben,,11832085162654999889 -2021-01-04T00:00:00.000000000,Ryan,9,2122274938272070218 -''' +2021-01-01T00:00:00.000000000,Ben,5,16461383214845928621 +2021-01-01T00:00:00.000000000,Ryan,8,6794973171266502674 +2021-01-02T00:00:00.000000000,Ryan,9,15653042715643359010 +2021-01-03T00:00:00.000000000,Ben,8,6794973171266502674 +2021-01-04T00:00:00.000000000,Ben,,0 +2021-01-04T00:00:00.000000000,Ryan,9,15653042715643359010 +""" diff --git a/crates/sparrow-catalog/catalog/hourly.toml b/crates/sparrow-catalog/catalog/hourly.toml index 7149ae064..2df1012ba 100644 --- a/crates/sparrow-catalog/catalog/hourly.toml +++ b/crates/sparrow-catalog/catalog/hourly.toml @@ -1,28 +1,28 @@ -name = 'hourly' -signature = 'hourly() -> bool' -short_doc = 'A periodic function that produces a `true` value at the start of each hour.' -long_doc = ''' +name = "hourly" +signature = "hourly() -> bool" +short_doc = "A periodic function that produces a `true` value at the start of each hour." +long_doc = """ This function is often used in aggregations to produce windows or as a predicate column. ### Results Returns a boolean column with each row containing a `true` value at the start of the hour, and `null` at all other times. -''' -tags = ['tick'] +""" +tags = ["tick"] [[examples]] -name = 'Hourly Aggregated Window' -description = ''' +name = "Hourly Aggregated Window" +description = """ In this example, the `hourly()` function is used as an argument to the [`since](#since) function, which produces a window. The result is a windowed aggregation that resets hourly. 
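The empty-`n` rows in the CSVs below are tick rows: at each hour boundary the window emits its closing value per key and then resets. A rough Python sketch of this `since(hourly())` behavior; the interleaving of same-timestamp rows is simplified (sorted by key here), and as the reordered lines in this diff show, the engine's own ordering at equal times can differ:

```python
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Tuple

Row = Tuple[datetime, str, Optional[int], Optional[int]]

def hourly_sum(rows: List[Tuple[datetime, str, int]]) -> List[Row]:
    out: List[Row] = []
    sums: Dict[str, int] = {}
    tick: Optional[datetime] = None

    def fire(at: datetime) -> None:
        # Emit the closing sum per key at the boundary, then reset the window.
        for k in sorted(sums):
            out.append((at, k, None, sums[k]))
            sums[k] = 0

    for t, key, n in rows:  # rows assumed sorted by time
        if tick is None:
            tick = t.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
        while t > tick:  # a row exactly at the boundary still joins the closing window
            fire(tick)
            tick += timedelta(hours=1)
        sums[key] = sums.get(key, 0) + n
        out.append((t, key, n, sums[key]))
    if tick is not None and rows[-1][0] >= tick:
        fire(tick)  # input ended exactly on a boundary, so that tick still fires
    return out

rows = [(datetime(1996, 12, 19, 16, 0, 57), "Ben", 2),
        (datetime(1996, 12, 19, 16, 0, 58), "Ryan", 3),
        (datetime(1996, 12, 19, 17, 0, 59), "Ben", 6)]
assert hourly_sum(rows)[2:4] == [
    (datetime(1996, 12, 19, 17, 0), "Ben", None, 2),
    (datetime(1996, 12, 19, 17, 0), "Ryan", None, 3),
]
```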
-''' -full_expression = ''' +""" +full_expression = """ { n: Input.n, hourly_sum: sum(Input.n, window = since(hourly())) } | extend({time: time_of($input), key: first(Input.key) }) -''' -input_csv = ''' +""" +input_csv = """ time,key,n 1996-12-19T16:00:57-00:00,Ben,2 1996-12-19T16:00:58-00:00,Ryan,3 @@ -30,31 +30,31 @@ time,key,n 1996-12-19T17:01:00-00:00,Ben,9 1996-12-19T17:01:00-00:00,Ryan,8 1996-12-19T18:00:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,hourly_sum 1996-12-19T16:00:57.000000000,Ben,2,2 1996-12-19T16:00:58.000000000,Ryan,3,3 -1996-12-19T17:00:00.000000000,Ben,,2 1996-12-19T17:00:00.000000000,Ryan,,3 +1996-12-19T17:00:00.000000000,Ben,,2 1996-12-19T17:00:59.000000000,Ben,6,6 1996-12-19T17:01:00.000000000,Ben,9,15 1996-12-19T17:01:00.000000000,Ryan,8,8 1996-12-19T18:00:00.000000000,Ben,1,16 -1996-12-19T18:00:00.000000000,Ben,,16 1996-12-19T18:00:00.000000000,Ryan,,8 -''' +1996-12-19T18:00:00.000000000,Ben,,16 +""" [[examples]] -name = 'Filter Hourly' -description = ''' +name = "Filter Hourly" +description = """ In this example, the `hourly()` function is used as an argument to the [`when`](#when) function, which filters input. The output includes the last input row before a [`tick`](#tick) occurs. -''' -full_expression = 'Input | last() | when(hourly())' -input_csv = ''' +""" +full_expression = "Input | last() | when(hourly())" +input_csv = """ time,key,n 1996-12-19T16:00:57-00:00,Ben,2 1996-12-19T16:00:58-00:00,Ryan,3 @@ -62,11 +62,11 @@ time,key,n 1996-12-19T17:01:00-00:00,Ben,9 1996-12-19T17:01:00-00:00,Ryan,8 1996-12-19T18:00:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n -1996-12-19T16:00:57.000000000,Ben,2 1996-12-19T16:00:58.000000000,Ryan,3 -1996-12-19T18:00:00.000000000,Ben,1 +1996-12-19T16:00:57.000000000,Ben,2 1996-12-19T17:01:00.000000000,Ryan,8 -''' +1996-12-19T18:00:00.000000000,Ben,1 +""" diff --git a/crates/sparrow-catalog/catalog/if.toml b/crates/sparrow-catalog/catalog/if.toml index 652123aef..cb0bfca12 100644 --- a/crates/sparrow-catalog/catalog/if.toml +++ b/crates/sparrow-catalog/catalog/if.toml @@ -1,8 +1,8 @@ -name = 'if' -signature = 'if(condition: bool, value: any) -> any' -short_doc = 'Return the `value` if `condition` is `true`, `null` otherwise.' -long_doc = ''' -`if` "nulls out" the `value` if `condition` is `false`. +name = "if" +signature = "if(condition: bool, value: any) -> any" +short_doc = "Return the `value` if `condition` is `true`, `null` otherwise." +long_doc = """ +`if` \"nulls out\" the `value` if `condition` is `false`. It is equivalent to `null_if(!condition, value)`. See also [`null_if`](#null_if). @@ -12,30 +12,30 @@ See also [`null_if`](#null_if). * value: The value to return if `condition` is `true`. Note: The order of arguments is chosen to allow use with the pipe operation. -Specifically, `value | if(condition)` may be used to conditionally "null-out" +Specifically, `value | if(condition)` may be used to conditionally \"null-out\" the value on the left-hand side. ### Results For each row, return the `value` if `condition` is `true`. Returns `null` if the `condition` is `false` or `null`. 
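A tiny Python sketch of this truth table, with `if_` as a hypothetical stand-in (Python reserves `if`):

```python
from typing import Optional, TypeVar

T = TypeVar("T")

def if_(condition: Optional[bool], value: Optional[T]) -> Optional[T]:
    # Keep the value only when the condition is exactly true; false and
    # null conditions both null the value out, per the doc.
    return value if condition is True else None

assert if_(True, 58.7) == 58.7
assert if_(False, 57.8) is None
assert if_(None, 876.0) is None  # the null-condition rows in the example
```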
-''' -tags = ['logical'] +""" +tags = ["logical"] [[examples]] -expression = 'Input.value | if(Input.condition)' -input_csv = ''' +expression = "Input.value | if(Input.condition)" +input_csv = """ time,key,value,condition 2021-01-01T00:00:00.000000000Z,A,57.8,false 2021-01-02T00:00:00.000000000Z,B,58.7,true 2021-01-03T00:00:00.000000000Z,A,,true 2021-01-04T00:00:00.000000000Z,A,876, 2021-01-05T00:00:00.000000000Z,A,786.0, -''' -output_csv = ''' +""" +output_csv = """ time,key,value,condition,result 2021-01-01T00:00:00.000000000,A,57.8,false, 2021-01-02T00:00:00.000000000,B,58.7,true,58.7 2021-01-03T00:00:00.000000000,A,,true, 2021-01-04T00:00:00.000000000,A,876.0,, 2021-01-05T00:00:00.000000000,A,786.0,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/is_valid.toml b/crates/sparrow-catalog/catalog/is_valid.toml index 0572e03d4..1901e545c 100644 --- a/crates/sparrow-catalog/catalog/is_valid.toml +++ b/crates/sparrow-catalog/catalog/is_valid.toml @@ -1,7 +1,7 @@ -name = 'is_valid' -signature = 'is_valid(input: any) -> bool' -short_doc = 'Returns `true` if `input` is non-`null`.' -long_doc = ''' +name = "is_valid" +signature = "is_valid(input: any) -> bool" +short_doc = "Returns `true` if `input` is non-`null`." +long_doc = """ ### Parameters * input: The input to test for `null`. @@ -12,13 +12,13 @@ Returns a `bool` column that is `true` if the `input` is Note: Unlike many functions which return `null` if any of their arguments are `null`, `is_valid` will never return `null`. -''' -tags = ['misc'] +""" +tags = ["misc"] [[examples]] -name = 'Is Valid' -expression = 'is_valid(Input.value)' -input_csv = ''' +name = "Is Valid" +expression = "is_valid(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,5 2021-01-01T00:00:00.000000000Z,Ryan, @@ -26,8 +26,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,3 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,2 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,5,true 2021-01-01T00:00:00.000000000,Ryan,,false @@ -35,4 +35,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,3,true 2021-01-04T00:00:00.000000000,Ben,,false 2021-01-04T00:00:00.000000000,Ryan,2,true -''' +""" diff --git a/crates/sparrow-catalog/catalog/json.toml b/crates/sparrow-catalog/catalog/json.toml index 0524898cd..ff76581af 100644 --- a/crates/sparrow-catalog/catalog/json.toml +++ b/crates/sparrow-catalog/catalog/json.toml @@ -1,12 +1,12 @@ -name = 'json' -signature = 'json(s: string) -> json' -short_doc = 'Creates a JSON object from a string.' -experimental = ''' +name = "json" +signature = "json(s: string) -> json" +short_doc = "Creates a JSON object from a string." +experimental = """ `json` is experimental functionality. You should expect the behavior to potentially change in the future. Certain functionality, such as nested types, are not yet supported. -''' -long_doc = ''' +""" +long_doc = """ This functions converts a JSON string into a JSON object. Fields of the JSON object can be accessed as strings and cast into other types. @@ -15,29 +15,29 @@ the JSON object can be accessed as strings and cast into other types. ### Results Returns a JSON object. 
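A small Python sketch of the documented field access, under the doc's note that fields surface as strings to be cast later; `json_field` is a hypothetical helper, not the catalog API:

```python
import json
from typing import Optional

def json_field(s: Optional[str], field: str) -> Optional[str]:
    # Parse the JSON string and pull one field; a null input or a missing
    # field yields null, and present values surface as strings.
    if s is None:
        return None
    value = json.loads(s).get(field)
    return None if value is None else str(value)

assert json_field('{"a": 10}', "a") == "10"
assert json_field('{"b": 10}', "a") is None  # missing field, as in the third row
```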
-''' -tags = ['string'] +""" +tags = ["string"] [[examples]] -name = 'JSON field access' -expression = 'json(Input.json_string).a' -input_csv = ''' +name = "JSON field access" +expression = "json(Input.json_string).a" +input_csv = """ time,key,json_string -2021-01-01T00:00:00.000000000Z,Ben,"{""a"": 10}" -2021-01-02T00:00:00.000000000Z,Ryan,"{""a"": 2}" -2021-01-03T00:00:00.000000000Z,Ryan,"{""b"": 10}" -2021-01-04T00:00:00.000000000Z,Ben,"{""a"": 4}" -2021-01-05T00:00:00.000000000Z,Ben,"{""c"": 12}" -2021-01-06T00:00:00.000000000Z,Jordan,"{""a"": 0}" -2021-01-07T00:00:00.000000000Z,Ryan,"{""a"": 8}" -''' -output_csv = ''' +2021-01-01T00:00:00.000000000Z,Ben,\"{\"\"a\"\": 10}\" +2021-01-02T00:00:00.000000000Z,Ryan,\"{\"\"a\"\": 2}\" +2021-01-03T00:00:00.000000000Z,Ryan,\"{\"\"b\"\": 10}\" +2021-01-04T00:00:00.000000000Z,Ben,\"{\"\"a\"\": 4}\" +2021-01-05T00:00:00.000000000Z,Ben,\"{\"\"c\"\": 12}\" +2021-01-06T00:00:00.000000000Z,Jordan,\"{\"\"a\"\": 0}\" +2021-01-07T00:00:00.000000000Z,Ryan,\"{\"\"a\"\": 8}\" +""" +output_csv = """ time,key,json_string,result -2021-01-01T00:00:00.000000000,Ben,"{""a"": 10}",10 -2021-01-02T00:00:00.000000000,Ryan,"{""a"": 2}",2 -2021-01-03T00:00:00.000000000,Ryan,"{""b"": 10}", -2021-01-04T00:00:00.000000000,Ben,"{""a"": 4}",4 -2021-01-05T00:00:00.000000000,Ben,"{""c"": 12}", -2021-01-06T00:00:00.000000000,Jordan,"{""a"": 0}",0 -2021-01-07T00:00:00.000000000,Ryan,"{""a"": 8}",8 -''' +2021-01-01T00:00:00.000000000,Ben,\"{\"\"a\"\": 10}\",10 +2021-01-02T00:00:00.000000000,Ryan,\"{\"\"a\"\": 2}\",2 +2021-01-03T00:00:00.000000000,Ryan,\"{\"\"b\"\": 10}\", +2021-01-04T00:00:00.000000000,Ben,\"{\"\"a\"\": 4}\",4 +2021-01-05T00:00:00.000000000,Ben,\"{\"\"c\"\": 12}\", +2021-01-06T00:00:00.000000000,Jordan,\"{\"\"a\"\": 0}\",0 +2021-01-07T00:00:00.000000000,Ryan,\"{\"\"a\"\": 8}\",8 +""" diff --git a/crates/sparrow-catalog/catalog/lag.toml b/crates/sparrow-catalog/catalog/lag.toml index 5c357d2cd..475c768e2 100644 --- a/crates/sparrow-catalog/catalog/lag.toml +++ b/crates/sparrow-catalog/catalog/lag.toml @@ -1,7 +1,7 @@ -name = 'lag' -signature = 'lag(const n: i64, input: ordered) -> ordered' -short_doc = 'Returns a lagging value of `e`.' -long_doc = ''' +name = "lag" +signature = "lag(const n: i64, input: ordered) -> ordered" +short_doc = "Returns a lagging value of `e`." +long_doc = """ ### Parameters * n: The amount of lag to retrieve. For instance, `n = 1` is the previous non-`null` value, `n = 2` is the non-`null` value before that, etc. @@ -9,13 +9,13 @@ long_doc = ''' ### Results Returns a new column with the same type as `input`, but with each row containing the value of `input` from `n` rows earlier (counting only non-`null` rows for the current entity). 
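A Python sketch of that per-entity, null-skipping lookback, assuming rows arrive in time order:

```python
from collections import defaultdict, deque
from typing import Any, Deque, Dict, Iterable, List, Optional, Tuple

def lag(n: int, rows: Iterable[Tuple[str, Optional[Any]]]) -> List[Optional[Any]]:
    # Per key, keep the last n+1 non-null values; the value n steps back is
    # the front of that buffer. Nulls are skipped rather than counted.
    history: Dict[str, Deque[Any]] = defaultdict(lambda: deque(maxlen=n + 1))
    out: List[Optional[Any]] = []
    for key, value in rows:
        if value is not None:
            history[key].append(value)
        buf = history[key]
        out.append(buf[0] if len(buf) == n + 1 else None)
    return out

rows = [("Ben", 1), ("Ryan", 2), ("Ryan", 3), ("Ryan", 4), ("Ben", 5), ("Ben", 6)]
assert lag(1, rows) == [None, None, 2, 3, 1, 5]  # matches the example output
```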
-''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Lag for Previous Value' -expression = 'lag(1, Input.n)' -input_csv = ''' +name = "Lag for Previous Value" +expression = "lag(1, Input.n)" +input_csv = """ time,key,n 1996-03-21T00:00:00-00:00,Ben,1 1996-04-21T00:00:00-00:00,Ryan,2 @@ -23,8 +23,8 @@ time,key,n 1996-06-21T00:00:00-00:00,Ryan,4 1996-07-21T00:00:00-00:00,Ben,5 1996-08-21T00:00:00-00:00,Ben,6 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,result 1996-03-21T00:00:00.000000000,Ben,1, 1996-04-21T00:00:00.000000000,Ryan,2, @@ -32,14 +32,14 @@ time,key,n,result 1996-06-21T00:00:00.000000000,Ryan,4,3 1996-07-21T00:00:00.000000000,Ben,5,1 1996-08-21T00:00:00.000000000,Ben,6,5 -''' +""" [[examples]] -name = 'Lag for Average Change' -description = ''' +name = "Lag for Average Change" +description = """ This example uses `lag` to compute the average difference between values of `n`. -''' -full_expression = ''' +""" +full_expression = """ # Will always be non-`null` after the first non-`null` `Input.n`. let prev_value = Input.n | lag(1) @@ -50,8 +50,8 @@ in difference, mean_difference: mean(difference), } | extend({ time: time_of($input), key: first(Input.key) }) -''' -input_csv = ''' +""" +input_csv = """ time,key,n 1996-03-21T00:00:00-00:00,Ben,1 1996-04-21T00:00:00-00:00,Ryan,2 @@ -59,8 +59,8 @@ time,key,n 1996-06-21T00:00:00-00:00,Ryan,4 1996-07-21T00:00:00-00:00,Ben,5 1996-08-21T00:00:00-00:00,Ben,6 -''' -output_csv = ''' +""" +output_csv = """ time,key,difference,mean_difference 1996-03-21T00:00:00.000000000,Ben,, 1996-04-21T00:00:00.000000000,Ryan,, @@ -68,4 +68,4 @@ time,key,difference,mean_difference 1996-06-21T00:00:00.000000000,Ryan,2,2.0 1996-07-21T00:00:00.000000000,Ben,4,4.0 1996-08-21T00:00:00.000000000,Ben,1,2.5 -''' +""" diff --git a/crates/sparrow-catalog/catalog/last.toml b/crates/sparrow-catalog/catalog/last.toml index 1ff376567..3095fa352 100644 --- a/crates/sparrow-catalog/catalog/last.toml +++ b/crates/sparrow-catalog/catalog/last.toml @@ -1,7 +1,7 @@ -name = 'last' -signature = 'last(input: any, window: window = null) -> any' -short_doc = 'Computes the last value present across the input.' -long_doc = ''' +name = "last" +signature = "last(input: any, window: window = null) -> any" +short_doc = "Computes the last value present across the input." +long_doc = """ ### Parameters * input: The input to be considered. * window: The window to aggregate within, as described in @@ -18,17 +18,17 @@ NOTE: The last value is inclusive of any values at the current time. This means that if the current row is new and non-`null`, the result will be the same of the input. If the input is not new or `null`, this will be the previous value that was new and non-`null`. -''' -tags = ['aggregation'] +""" +tags = ["aggregation"] [[examples]] -name = 'Last' -description = ''' +name = "Last" +description = """ As shown in the example, the last aggregation is useful for extrapolating missing results from the most recent present result. 
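In Python terms the running `last` is a per-key forward fill; a sketch, ahead of the expression below:

```python
from typing import Any, Dict, Iterable, List, Optional, Tuple

def last(rows: Iterable[Tuple[str, Optional[Any]]]) -> List[Optional[Any]]:
    # Each null row repeats the most recent non-null value for its entity;
    # a new non-null value is reflected immediately (inclusive of "now").
    latest: Dict[str, Any] = {}
    out: List[Optional[Any]] = []
    for key, value in rows:
        if value is not None:
            latest[key] = value
        out.append(latest.get(key))
    return out

rows = [("Ben", 50.7), ("Ryan", None), ("Ryan", 67.2),
        ("Ben", 1.2), ("Ben", None), ("Ryan", 2.3)]
assert last(rows) == [50.7, None, 67.2, 1.2, 1.2, 2.3]  # matches the example
```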
-''' -expression = 'last(Input.value)' -input_csv = ''' +""" +expression = "last(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,50.7 2021-01-02T00:00:00.000000000Z,Ryan, @@ -36,8 +36,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,1.2 2021-01-03T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,50.7,50.7 2021-01-02T00:00:00.000000000,Ryan,, @@ -45,4 +45,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,1.2,1.2 2021-01-03T00:00:00.000000000,Ben,,1.2 2021-01-04T00:00:00.000000000,Ryan,2.3,2.3 -''' +""" diff --git a/crates/sparrow-catalog/catalog/len.toml b/crates/sparrow-catalog/catalog/len.toml index 128235336..48d4ed750 100644 --- a/crates/sparrow-catalog/catalog/len.toml +++ b/crates/sparrow-catalog/catalog/len.toml @@ -1,7 +1,7 @@ -name = 'len' -signature = 'len(s: string) -> i32' -short_doc = 'Returns the length of the string `s`.' -long_doc = ''' +name = "len" +signature = "len(s: string) -> i32" +short_doc = "Returns the length of the string `s`." +long_doc = """ ### Parameters * s: The string to compute the length of. @@ -9,13 +9,13 @@ long_doc = ''' Returns an `i32` column with each row containing the length of the string `s` in that row. Returns `0` for the empty string and `null` if `s` is `null`. -''' -tags = ['string'] +""" +tags = ["string"] [[examples]] -name = 'String Length' -expression = 'Input.value | len()' -input_csv = ''' +name = "String Length" +expression = "Input.value | len()" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,Hello World 2021-01-02T00:00:00.000000000Z,Ryan,'' @@ -23,8 +23,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,Hello 2021-01-03T00:00:00.000000000Z,Ben,'' 2021-01-04T00:00:00.000000000Z,Ryan,hi -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,Hello World,11 2021-01-02T00:00:00.000000000,Ryan,'',2 @@ -32,4 +32,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,Hello,5 2021-01-03T00:00:00.000000000,Ben,'',2 2021-01-04T00:00:00.000000000,Ryan,hi,2 -''' +""" diff --git a/crates/sparrow-catalog/catalog/logical_and.toml b/crates/sparrow-catalog/catalog/logical_and.toml index bff1cc0af..85f385287 100644 --- a/crates/sparrow-catalog/catalog/logical_and.toml +++ b/crates/sparrow-catalog/catalog/logical_and.toml @@ -1,8 +1,8 @@ -name = 'logical_and' -signature = 'logical_and(a: bool, b: bool) -> bool' -operator = 'a and b' -short_doc = 'Returns the logical conjunction (AND) of two booleans.' -long_doc = ''' +name = "logical_and" +signature = "logical_and(a: bool, b: bool) -> bool" +operator = "a and b" +short_doc = "Returns the logical conjunction (AND) of two booleans." +long_doc = """ This is the function used for the binary operation `a and b`. ### Parameters @@ -13,13 +13,13 @@ This is the function used for the binary operation `a and b`. * Returns `true` if `a` and `b` are both `true`. * Returns `false` if `a` or `b` are `false`. * Returns `null` if `a` or `b` are `null`. 
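Read together with the example rows, these bullets describe Kleene (three-valued) conjunction: a definite `false` dominates `null`. A Python sketch:

```python
from typing import Optional

def logical_and(a: Optional[bool], b: Optional[bool]) -> Optional[bool]:
    # Three-valued AND: false on either side wins even against null;
    # otherwise null is contagious.
    if a is False or b is False:
        return False
    if a is None or b is None:
        return None
    return a and b

assert logical_and(True, None) is None     # the 2021-02-01 row
assert logical_and(None, False) is False   # the 2021-02-02 row
assert logical_and(False, None) is False   # the 2021-03-01 row
```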
-''' -tags = ['logical'] +""" +tags = ["logical"] [[examples]] -name = 'Logical And' -expression = 'Input.a and Input.b' -input_csv = ''' +name = "Logical And" +expression = "Input.a and Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,true,false 2021-01-02T00:00:00.000000000Z,B,true,true @@ -29,8 +29,8 @@ time,key,a,b 2021-02-01T00:00:00.000000000Z,B,true, 2021-02-02T00:00:00.000000000Z,A,,false 2021-03-01T00:00:00.000000000Z,B,false, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,A,true,false,false 2021-01-02T00:00:00.000000000,B,true,true,true @@ -40,4 +40,4 @@ time,key,a,b,result 2021-02-01T00:00:00.000000000,B,true,, 2021-02-02T00:00:00.000000000,A,,false,false 2021-03-01T00:00:00.000000000,B,false,,false -''' +""" diff --git a/crates/sparrow-catalog/catalog/logical_or.toml b/crates/sparrow-catalog/catalog/logical_or.toml index 3845b8ecf..30b7be5b2 100644 --- a/crates/sparrow-catalog/catalog/logical_or.toml +++ b/crates/sparrow-catalog/catalog/logical_or.toml @@ -1,8 +1,8 @@ -name = 'logical_or' -signature = 'logical_or(a: bool, b: bool) -> bool' -operator = 'a or b' -short_doc = 'Returns the logical disjunction (OR) of two booleans.' -long_doc = ''' +name = "logical_or" +signature = "logical_or(a: bool, b: bool) -> bool" +operator = "a or b" +short_doc = "Returns the logical disjunction (OR) of two booleans." +long_doc = """ This is the function used for the binary operation `a or b`. ### Parameters @@ -13,13 +13,13 @@ This is the function used for the binary operation `a or b`. * Returns `true` if `a` or `b` are `true`. * Returns `false` if `a` and `b` are both `false`. * Returns `null` if `a` or `b` are `null`. -''' -tags = ['logical'] +""" +tags = ["logical"] [[examples]] -name = 'Logical Or' -expression = 'Input.a or Input.b' -input_csv = ''' +name = "Logical Or" +expression = "Input.a or Input.b" +input_csv = """ time,subsort,key,a,b 2021-01-01T00:00:00.000000000Z,0,A,true,false 2021-01-02T00:00:00.000000000Z,0,B,true,true @@ -29,8 +29,8 @@ time,subsort,key,a,b 2021-02-01T00:00:00.000000000Z,0,B,true, 2021-02-02T00:00:00.000000000Z,0,A,,false 2021-03-01T00:00:00.000000000Z,0,B,false, -''' -output_csv = ''' +""" +output_csv = """ time,subsort,key,a,b,result 2021-01-01T00:00:00.000000000,0,A,true,false,true 2021-01-02T00:00:00.000000000,0,B,true,true,true @@ -40,4 +40,4 @@ time,subsort,key,a,b,result 2021-02-01T00:00:00.000000000,0,B,true,,true 2021-02-02T00:00:00.000000000,0,A,,false, 2021-03-01T00:00:00.000000000,0,B,false,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/lookup.toml b/crates/sparrow-catalog/catalog/lookup.toml index 8a8dcb8e7..eb9e404b7 100644 --- a/crates/sparrow-catalog/catalog/lookup.toml +++ b/crates/sparrow-catalog/catalog/lookup.toml @@ -1,7 +1,7 @@ -name = 'lookup' -signature = 'lookup(key: key, value: any) -> any' -short_doc = 'Looks up the value for a foreign key.' -long_doc = ''' +name = "lookup" +signature = "lookup(key: key, value: any) -> any" +short_doc = "Looks up the value for a foreign key." +long_doc = """ Performs a lookup join between the `key` and the computed `value` from a foreign entity. ### Parameters @@ -14,16 +14,16 @@ Performs a lookup join between the `key` and the computed `value` from a foreign ### Results For each row with a non-`null` key, returns the value at that time from the `value` computed for the entity identified by the `key`. Yields `null` if the `key` is `null` or if there is no foreign value computed for that key at the corresponding time. 
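Conceptually this is an as-of join keyed by the foreign entity. A loose Python sketch under that reading; the `history` mapping of per-key (time, value) series is an assumption of the sketch, not the engine's representation:

```python
import bisect
from typing import Any, Dict, List, Optional, Tuple

def lookup(history: Dict[str, List[Tuple[int, Any]]],
           key: Optional[str], t: int) -> Optional[Any]:
    # Return the foreign entity's most recent computed value at time t;
    # null when the key is null or nothing has been computed for it yet.
    if key is None or key not in history:
        return None
    series = history[key]  # (time, value) pairs sorted by time
    i = bisect.bisect_right([ts for ts, _ in series], t)
    return series[i - 1][1] if i else None

# Loosely mirrors average_product_review for krabby_patty in the example.
avg_review = {"krabby_patty": [(1, 3.0), (3, 4.0), (4, 3.0)]}
assert lookup(avg_review, "krabby_patty", 2) == 3.0
assert lookup(avg_review, None, 2) is None
```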
-''' -tags = ['grouping'] +""" +tags = ["grouping"] [[examples]] -name = 'Lookup' -description = ''' +name = "Lookup" +description = """ This example operates on customer reviews. It augments each review with the average rating the customer has given and the average rating the product has received, up to that point in time. -''' -full_expression = ''' +""" +full_expression = """ # This is the average review a product has received (keyed by products) let average_review_by_product = ProductReviewsByProduct.stars | mean() @@ -41,39 +41,39 @@ in average_customer_review, average_product_review, } | extend({ time: time_of($input)} ) -''' -output_csv = ''' +""" +output_csv = """ time,key,average_customer_review,average_product_review 2021-01-01T00:00:00.000000000,krabby_patty,3.0,3.0 2021-01-02T00:00:00.000000000,coral_bits,3.5,4.0 2021-03-01T00:00:00.000000000,krabby_patty,5.0,4.0 2021-04-10T00:00:00.000000000,krabby_patty,2.6666666666666665,3.0 -''' +""" [[examples.tables]] -name = 'ProductReviewsByProduct' -uuid = 'dd440605-4cee-431b-b208-360ec00a2192' -time_column_name = 'time' -group_column_name = 'product_id' -grouping = 'products' -input_csv = ''' +name = "ProductReviewsByProduct" +uuid = "dd440605-4cee-431b-b208-360ec00a2192" +time_column_name = "time" +group_column_name = "product_id" +grouping = "products" +input_csv = """ time,customer_id,product_id,stars 2021-01-01T00:00:00.000000000Z,Patrick,krabby_patty,3 2021-01-02T00:00:00.000000000Z,Patrick,coral_bits,4 2021-03-01T00:00:00.000000000Z,Squidward,krabby_patty,5 2021-04-10T00:00:00.000000000Z,Patrick,krabby_patty,1 -''' +""" [[examples.tables]] -name = 'ProductReviewsByCustomer' -uuid = '7ababffe-a104-4f8b-8288-20d9ce8fb162' -time_column_name = 'time' -group_column_name = 'customer_id' -grouping = 'customers' -input_csv = ''' +name = "ProductReviewsByCustomer" +uuid = "7ababffe-a104-4f8b-8288-20d9ce8fb162" +time_column_name = "time" +group_column_name = "customer_id" +grouping = "customers" +input_csv = """ time,customer_id,product_id,stars 2021-01-01T00:00:00.000000000Z,Patrick,krabby_patty,3 2021-01-02T00:00:00.000000000Z,Patrick,coral_bits,4 2021-03-01T00:00:00.000000000Z,Squidward,krabby_patty,5 2021-04-10T00:00:00.000000000Z,Patrick,krabby_patty,1 -''' +""" diff --git a/crates/sparrow-catalog/catalog/lower.toml b/crates/sparrow-catalog/catalog/lower.toml index b5397104f..eda6c9373 100644 --- a/crates/sparrow-catalog/catalog/lower.toml +++ b/crates/sparrow-catalog/catalog/lower.toml @@ -1,7 +1,7 @@ -name = 'lower' -signature = 'lower(s: string) -> string' -short_doc = 'Converts the string to lower case.' -long_doc = ''' +name = "lower" +signature = "lower(s: string) -> string" +short_doc = "Converts the string to lower case." +long_doc = """ ### Parameters * s: The string to convert to lower case. @@ -10,13 +10,13 @@ long_doc = ''' Returns a `string` column with each row containing the string `s` from that row converted to all lower case. The row contains `null` if `s` is `null` in that row. 
-''' -tags = ['string'] +""" +tags = ["string"] [[examples]] -name = 'Lower Case' -expression = 'Input.value | lower()' -input_csv = ''' +name = "Lower Case" +expression = "Input.value | lower()" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,Hello World 2021-01-02T00:00:00.000000000Z,Ryan, @@ -24,8 +24,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,Hello 2021-01-03T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,hi -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,Hello World,hello world 2021-01-02T00:00:00.000000000,Ryan,, @@ -33,4 +33,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,Hello,hello 2021-01-03T00:00:00.000000000,Ben,, 2021-01-04T00:00:00.000000000,Ryan,hi,hi -''' +""" diff --git a/crates/sparrow-catalog/catalog/lt.toml b/crates/sparrow-catalog/catalog/lt.toml index a46cf7b06..cedf92b1d 100644 --- a/crates/sparrow-catalog/catalog/lt.toml +++ b/crates/sparrow-catalog/catalog/lt.toml @@ -1,8 +1,8 @@ -name = 'lt' -signature = 'lt(a: ordered, b: ordered) -> bool' -operator = 'a < b' -short_doc = 'Return `true` if `a` is less than `b`.' -long_doc = ''' +name = "lt" +signature = "lt(a: ordered, b: ordered) -> bool" +operator = "a < b" +short_doc = "Return `true` if `a` is less than `b`." +long_doc = """ This is the function used for the binary comparison `a < b`. ### Parameters @@ -17,13 +17,13 @@ they may be promoted to a compatible numeric type following the Returns a `bool` column indicating the results. For each row, it contains `null` if `a` or `b` are `null`, `true` if `a` is less than `b` and `false` if `a` is greater than or equal to `b`. -''' -tags = ['comparison'] +""" +tags = ["comparison"] [[examples]] -name = 'Less Than' -expression = 'Input.a < Input.b' -input_csv = ''' +name = "Less Than" +expression = "Input.a < Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,Ben,50.7,6.0 2021-01-02T00:00:00.000000000Z,Ryan,,70 @@ -32,8 +32,8 @@ time,key,a,b 2021-01-05T00:00:00.000000000Z,Ben,65, 2021-01-06T00:00:00.000000000Z,Jordan,2.3,68.7 2021-01-07T00:00:00.000000000Z,Ryan,, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,Ben,50.7,6.0,false 2021-01-02T00:00:00.000000000,Ryan,,70.0, @@ -42,4 +42,4 @@ time,key,a,b,result 2021-01-05T00:00:00.000000000,Ben,65.0,, 2021-01-06T00:00:00.000000000,Jordan,2.3,68.7,true 2021-01-07T00:00:00.000000000,Ryan,,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/lte.toml b/crates/sparrow-catalog/catalog/lte.toml index 486fc8e06..bf22a0288 100644 --- a/crates/sparrow-catalog/catalog/lte.toml +++ b/crates/sparrow-catalog/catalog/lte.toml @@ -1,8 +1,8 @@ -name = 'lte' -signature = 'lte(a: ordered, b: ordered) -> bool' -operator = 'a < b' -short_doc = 'Return `true` if `a` is less than or equal to `b`.' -long_doc = ''' +name = "lte" +signature = "lte(a: ordered, b: ordered) -> bool" +operator = "a < b" +short_doc = "Return `true` if `a` is less than or equal to `b`." +long_doc = """ This is the function used for the binary comparison `a <= b`. ### Parameters @@ -17,13 +17,13 @@ they may be promoted to a compatible numeric type following the Returns a `bool` column indicating the results. For each row, it contains `null` if `a` or `b` are `null`, `true` if `a` is less than or equal to `b`, and `false` if `a` is greater than `b`. 
-''' -tags = ['comparison'] +""" +tags = ["comparison"] [[examples]] -name = 'Less Than or Equal To' -expression = 'Input.a <= Input.b' -input_csv = ''' +name = "Less Than or Equal To" +expression = "Input.a <= Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,Ben,50.7,6.0 2021-01-02T00:00:00.000000000Z,Ryan,,70 @@ -32,8 +32,8 @@ time,key,a,b 2021-01-05T00:00:00.000000000Z,Ben,65, 2021-01-06T00:00:00.000000000Z,Jordan,2.3,68.7 2021-01-07T00:00:00.000000000Z,Ryan,, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,Ben,50.7,6.0,false 2021-01-02T00:00:00.000000000,Ryan,,70.0, @@ -42,4 +42,4 @@ time,key,a,b,result 2021-01-05T00:00:00.000000000,Ben,65.0,, 2021-01-06T00:00:00.000000000,Jordan,2.3,68.7,true 2021-01-07T00:00:00.000000000,Ryan,,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/max.toml b/crates/sparrow-catalog/catalog/max.toml index 7b8834a8b..196a4f07a 100644 --- a/crates/sparrow-catalog/catalog/max.toml +++ b/crates/sparrow-catalog/catalog/max.toml @@ -1,7 +1,7 @@ -name = 'max' -signature = 'max(input: ordered, window: window = null) -> ordered' -short_doc = 'Computes the maximum of values across the input.' -long_doc = ''' +name = "max" +signature = "max(input: ordered, window: window = null) -> ordered" +short_doc = "Computes the maximum of values across the input." +long_doc = """ This is an aggregation that computes the maximum across multiple rows. See [`zip_max`](#zip-max) to take the maximum of two values from each row. @@ -17,16 +17,16 @@ See [window functions](#window-functions) for how to specify the aggregation win For each input row, return the maximum of new, non-`null` rows in `input` up to and including the input row for the given entity. Returns `null` until there has been at least one such input. -''' +""" tags = [ - 'aggregation', - 'math', + "aggregation", + "math", ] [[examples]] -name = 'Maximum' -expression = 'max(Input.value)' -input_csv = ''' +name = "Maximum" +expression = "max(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,50.7 2021-01-01T00:00:00.000000000Z,Ryan, @@ -34,8 +34,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,1.2 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,50.7,50.7 2021-01-01T00:00:00.000000000,Ryan,, @@ -43,4 +43,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,1.2,50.7 2021-01-04T00:00:00.000000000,Ben,,50.7 2021-01-04T00:00:00.000000000,Ryan,2.3,67.2 -''' +""" diff --git a/crates/sparrow-catalog/catalog/mean.toml b/crates/sparrow-catalog/catalog/mean.toml index a56bb2538..f8214227b 100644 --- a/crates/sparrow-catalog/catalog/mean.toml +++ b/crates/sparrow-catalog/catalog/mean.toml @@ -1,7 +1,7 @@ -name = 'mean' -signature = 'mean(input: number, window: window = null) -> f64' -short_doc = 'Computes the arithmetic mean of values across the input.' -long_doc = ''' +name = "mean" +signature = "mean(input: number, window: window = null) -> f64" +short_doc = "Computes the arithmetic mean of values across the input." +long_doc = """ ### Parameters * input: The input to compute the mean of. * window: The window to aggregate within, as described in @@ -13,16 +13,16 @@ See [window functions](#window-functions) for how to specify the aggregation win For each input row, return the mean of new, non-`null` rows in `input` up to and including the input row for the given entity. 
Returns `null` until there has been at least one such input. -''' +""" tags = [ - 'aggregation', - 'math', + "aggregation", + "math", ] [[examples]] -name = 'Mean' -expression = 'mean(Input.value)' -input_csv = ''' +name = "Mean" +expression = "mean(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,50.7 2021-01-01T00:00:00.000000000Z,Ryan, @@ -30,8 +30,8 @@ time,key,value 2021-01-02T00:00:00.000000000Z,Ben,1.2 2021-01-03T00:00:00.000000000Z,Ben, 2021-01-03T00:00:00.000000000Z,Ryan,2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,50.7,50.7 2021-01-01T00:00:00.000000000,Ryan,, @@ -39,4 +39,4 @@ time,key,value,result 2021-01-02T00:00:00.000000000,Ben,1.2,25.950000000000003 2021-01-03T00:00:00.000000000,Ben,,25.950000000000003 2021-01-03T00:00:00.000000000,Ryan,2.3,34.75 -''' +""" diff --git a/crates/sparrow-catalog/catalog/min.toml b/crates/sparrow-catalog/catalog/min.toml index 18fa458d9..ed527b1e1 100644 --- a/crates/sparrow-catalog/catalog/min.toml +++ b/crates/sparrow-catalog/catalog/min.toml @@ -1,7 +1,7 @@ -name = 'min' -signature = 'min(input: ordered, window: window = null) -> ordered' -short_doc = 'Computes the minimum of values across the input.' -long_doc = ''' +name = "min" +signature = "min(input: ordered, window: window = null) -> ordered" +short_doc = "Computes the minimum of values across the input." +long_doc = """ This is an aggregation that computes the minimum across multiple rows. See [`zip_min`](#zip-min) to take the minimum of two values from each row. @@ -17,16 +17,16 @@ See [window functions](#window-functions) for how to specify the aggregation win For each input row, return the minimum of new, non-`null` rows in `input` up to and including the input row for the given entity. Returns `null` until there has been at least one such input. -''' +""" tags = [ - 'aggregation', - 'math', + "aggregation", + "math", ] [[examples]] -name = 'Minimum' -expression = 'min(Input.value)' -input_csv = ''' +name = "Minimum" +expression = "min(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,50.7 2021-01-01T00:00:00.000000000Z,Ryan, @@ -34,8 +34,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,1.2 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,50.7,50.7 2021-01-01T00:00:00.000000000,Ryan,, @@ -43,4 +43,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,1.2,1.2 2021-01-04T00:00:00.000000000,Ben,,1.2 2021-01-04T00:00:00.000000000,Ryan,2.3,2.3 -''' +""" diff --git a/crates/sparrow-catalog/catalog/minutely.toml b/crates/sparrow-catalog/catalog/minutely.toml index e286b9f8b..fd4f930b7 100644 --- a/crates/sparrow-catalog/catalog/minutely.toml +++ b/crates/sparrow-catalog/catalog/minutely.toml @@ -1,28 +1,28 @@ -name = 'minutely' -signature = 'minutely() -> bool' -short_doc = 'A periodic function that produces a `true` value at the start of each minutely.' -long_doc = ''' +name = "minutely" +signature = "minutely() -> bool" +short_doc = "A periodic function that produces a `true` value at the start of each minutely." +long_doc = """ This function is often used in aggregations to produce windows or as a predicate column. ### Results Returns a boolean column with each row containing a `true` value at the start of each minute, and `null` at all other times. 
-''' -tags = ['tick'] +""" +tags = ["tick"] [[examples]] -name = 'Minutely Aggregated Window' -description = ''' +name = "Minutely Aggregated Window" +description = """ In this example, the `minutely()` function is used as an argument to the [`since`](#since) function, which produces a window. The result is a windowed aggregation that resets minutely. -''' -full_expression = ''' +""" +full_expression = """ { n: Input.n, minutely_sum: sum(Input.n, window = since(minutely())) } | extend({time: time_of($input), key: first(Input.key) }) -''' -input_csv = ''' +""" +input_csv = """ time,key,n 1996-12-19T16:00:57-00:00,Ben,2 1996-12-19T16:00:58-00:00,Ryan,3 @@ -30,33 +30,33 @@ time,key,n 1996-12-19T16:02:00-00:00,Ben,9 1996-12-19T16:02:00-00:00,Ryan,8 1996-12-19T16:03:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,minutely_sum 1996-12-19T16:00:57.000000000,Ben,2,2 1996-12-19T16:00:58.000000000,Ryan,3,3 -1996-12-19T16:01:00.000000000,Ben,,2 1996-12-19T16:01:00.000000000,Ryan,,3 +1996-12-19T16:01:00.000000000,Ben,,2 1996-12-19T16:01:59.000000000,Ben,6,6 1996-12-19T16:02:00.000000000,Ben,9,15 1996-12-19T16:02:00.000000000,Ryan,8,8 -1996-12-19T16:02:00.000000000,Ben,,15 1996-12-19T16:02:00.000000000,Ryan,,8 +1996-12-19T16:02:00.000000000,Ben,,15 1996-12-19T16:03:00.000000000,Ben,1,1 -1996-12-19T16:03:00.000000000,Ben,,1 1996-12-19T16:03:00.000000000,Ryan,, -''' +1996-12-19T16:03:00.000000000,Ben,,1 +""" [[examples]] -name = 'Filter Minutely' -description = ''' +name = "Filter Minutely" +description = """ In this example, the `minutely()` function is used as an argument to the [`when`](#when) function, which filters input. The output includes the last input row before a [`tick`](#tick) occurs. -''' -full_expression = 'Input | last() | when(minutely())' -input_csv = ''' +""" +full_expression = "Input | last() | when(minutely())" +input_csv = """ time,key,n 1996-12-19T16:00:57-00:00,Ben,2 1996-12-19T16:00:58-00:00,Ryan,3 @@ -64,13 +64,13 @@ time,key,n 1996-12-19T16:02:00-00:00,Ben,9 1996-12-19T16:02:00-00:00,Ryan,8 1996-12-19T16:03:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n -1996-12-19T16:00:57.000000000,Ben,2 1996-12-19T16:00:58.000000000,Ryan,3 +1996-12-19T16:00:57.000000000,Ben,2 +1996-12-19T16:02:00.000000000,Ryan,8 1996-12-19T16:02:00.000000000,Ben,9 1996-12-19T16:02:00.000000000,Ryan,8 1996-12-19T16:03:00.000000000,Ben,1 -1996-12-19T16:02:00.000000000,Ryan,8 -''' +""" diff --git a/crates/sparrow-catalog/catalog/month_of_year.toml b/crates/sparrow-catalog/catalog/month_of_year.toml index f02bc1a7c..470bf4b9a 100644 --- a/crates/sparrow-catalog/catalog/month_of_year.toml +++ b/crates/sparrow-catalog/catalog/month_of_year.toml @@ -1,7 +1,7 @@ -name = 'month_of_year' -signature = 'month_of_year(time: timestamp_ns) -> u32' -short_doc = 'Return the month-of-year for the given time, starting with 1.' -long_doc = ''' +name = "month_of_year" +signature = "month_of_year(time: timestamp_ns) -> u32" +short_doc = "Return the month-of-year for the given time, starting with 1." +long_doc = """ ### Parameters * time: The timestamp to return the month-of-year for. @@ -9,13 +9,13 @@ long_doc = ''' Returns a `u32` column containing the month-of-year for each input `time`. Returns `null` for rows where `time` is `null`. January is `1`. The result will be in the range 1 to 12 (inclusive).
-''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Month of Year' -expression = 'month_of_year(Input.time)' -input_csv = ''' +name = "Month of Year" +expression = "month_of_year(Input.time)" +input_csv = """ time,key 1996-03-21T00:00:00-00:00,Ben 1996-04-21T00:00:00-00:00,Ryan @@ -23,8 +23,8 @@ time,key 1996-06-21T00:00:00-00:00,Ryan 1996-07-21T00:00:00-00:00,Ben 1996-08-21T00:00:00-00:00,Ben -''' -output_csv = ''' +""" +output_csv = """ time,key,result 1996-03-21T00:00:00.000000000,Ben,3 1996-04-21T00:00:00.000000000,Ryan,4 @@ -32,4 +32,4 @@ time,key,result 1996-06-21T00:00:00.000000000,Ryan,6 1996-07-21T00:00:00.000000000,Ben,7 1996-08-21T00:00:00.000000000,Ben,8 -''' +""" diff --git a/crates/sparrow-catalog/catalog/month_of_year0.toml b/crates/sparrow-catalog/catalog/month_of_year0.toml index 66dd82b0a..cd71b9fab 100644 --- a/crates/sparrow-catalog/catalog/month_of_year0.toml +++ b/crates/sparrow-catalog/catalog/month_of_year0.toml @@ -1,7 +1,7 @@ -name = 'month_of_year0' -signature = 'month_of_year0(time: timestamp_ns) -> u32' -short_doc = 'Return the month-of-year for the given time, starting with 0.' -long_doc = ''' +name = "month_of_year0" +signature = "month_of_year0(time: timestamp_ns) -> u32" +short_doc = "Return the month-of-year for the given time, starting with 0." +long_doc = """ ### Parameters * time: The timestamp to return the month-of-year for. @@ -9,13 +9,13 @@ long_doc = ''' Returns a `u32` column containing the month-of-year for each input `time`. Returns `null` for rows where `time` is `null`. January is `0`. The result will be in the range 0 to 11 (inclusive). -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Month of Year (Zero Based)' -expression = 'month_of_year0(Input.time)' -input_csv = ''' +name = "Month of Year (Zero Based)" +expression = "month_of_year0(Input.time)" +input_csv = """ time,key 1996-03-21T00:00:00-00:00,Ben 1996-04-21T00:00:00-00:00,Ryan @@ -23,8 +23,8 @@ time,key 1996-06-21T00:00:00-00:00,Ryan 1996-07-21T00:00:00-00:00,Ben 1996-08-21T00:00:00-00:00,Ben -''' -output_csv = ''' +""" +output_csv = """ time,key,result 1996-03-21T00:00:00.000000000,Ben,2 1996-04-21T00:00:00.000000000,Ryan,3 @@ -32,4 +32,4 @@ time,key,result 1996-06-21T00:00:00.000000000,Ryan,5 1996-07-21T00:00:00.000000000,Ben,6 1996-08-21T00:00:00.000000000,Ben,7 -''' +""" diff --git a/crates/sparrow-catalog/catalog/monthly.toml b/crates/sparrow-catalog/catalog/monthly.toml index 605e1f9dc..be87ca753 100644 --- a/crates/sparrow-catalog/catalog/monthly.toml +++ b/crates/sparrow-catalog/catalog/monthly.toml @@ -1,28 +1,28 @@ -name = 'monthly' -signature = 'monthly() -> bool' -short_doc = 'A periodic function that produces a `true` value at the start of each calendar month (UTC).' -long_doc = ''' +name = "monthly" +signature = "monthly() -> bool" +short_doc = "A periodic function that produces a `true` value at the start of each calendar month (UTC)." +long_doc = """ This function is often used in aggregations to produce windows or as a predicate column. ### Results Returns a boolean column with each row containing a `true` value at the start of each calendar month, and `null` at all other times. -''' -tags = ['tick'] +""" +tags = ["tick"] [[examples]] -name = 'Monthly Aggregated Window' -description = ''' +name = "Monthly Aggregated Window" +description = """ In this example, the `monthly()` function is used as an argument to the [`since`](#since) function, which produces a window.
The result is a windowed aggregation that resets at the start of each calendar month. -''' -full_expression = ''' +""" +full_expression = """ { n: Input.n, monthly_sum: sum(Input.n, window = since(monthly())) } | extend({time: time_of($input), key: first(Input.key) }) -''' -input_csv = ''' +""" +input_csv = """ time,key,n 1996-02-19T16:00:00-00:00,Ben,2 1996-02-19T16:00:00-00:00,Ryan,3 @@ -30,33 +30,33 @@ time,key,n 1996-04-20T16:01:00-00:00,Ben,9 1996-04-21T16:00:00-00:00,Ryan,8 1996-05-21T16:00:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,monthly_sum 1996-02-19T16:00:00.000000000,Ben,2,2 1996-02-19T16:00:00.000000000,Ryan,3,3 -1996-03-01T00:00:00.000000000,Ben,,2 1996-03-01T00:00:00.000000000,Ryan,,3 -1996-04-01T00:00:00.000000000,Ben,, +1996-03-01T00:00:00.000000000,Ben,,2 1996-04-01T00:00:00.000000000,Ryan,, +1996-04-01T00:00:00.000000000,Ben,, 1996-04-20T16:00:00.000000000,Ben,6,6 1996-04-20T16:01:00.000000000,Ben,9,15 1996-04-21T16:00:00.000000000,Ryan,8,8 -1996-05-01T00:00:00.000000000,Ben,,15 1996-05-01T00:00:00.000000000,Ryan,,8 +1996-05-01T00:00:00.000000000,Ben,,15 1996-05-21T16:00:00.000000000,Ben,1,1 -''' +""" [[examples]] -name = 'Filter Monthly' -description = ''' +name = "Filter Monthly" +description = """ In this example, the `monthly()` function is used as an argument to the [`when`](#when) function, which filters input. The output includes the last input row before a [`tick`](#tick) occurs. -''' -full_expression = 'Input | last() | when(monthly())' -input_csv = ''' +""" +full_expression = "Input | last() | when(monthly())" +input_csv = """ time,key,n 1996-02-19T16:00:00-00:00,Ben,2 1996-02-19T16:00:00-00:00,Ryan,3 @@ -64,13 +64,13 @@ time,key,n 1996-04-20T16:01:00-00:00,Ben,9 1996-04-21T16:00:00-00:00,Ryan,8 1996-05-21T16:00:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n -1996-02-19T16:00:00.000000000,Ben,2 1996-02-19T16:00:00.000000000,Ryan,3 1996-02-19T16:00:00.000000000,Ben,2 1996-02-19T16:00:00.000000000,Ryan,3 -1996-04-20T16:01:00.000000000,Ben,9 +1996-02-19T16:00:00.000000000,Ben,2 1996-04-21T16:00:00.000000000,Ryan,8 -''' +1996-04-20T16:01:00.000000000,Ben,9 +""" diff --git a/crates/sparrow-catalog/catalog/months.toml b/crates/sparrow-catalog/catalog/months.toml index 78aa8df58..8e1a2a4a6 100644 --- a/crates/sparrow-catalog/catalog/months.toml +++ b/crates/sparrow-catalog/catalog/months.toml @@ -1,7 +1,7 @@ -name = 'months' -signature = 'months(months: i64) -> interval_months' -short_doc = 'Produces an interval corresponding to the given number of calendar months.' -long_doc = ''' +name = "months" +signature = "months(months: i64) -> interval_months" +short_doc = "Produces an interval corresponding to the given number of calendar months." +long_doc = """ ### Parameters * months: The number of calendar months to create the interval for. @@ -10,16 +10,16 @@ Returns an `interval_months` column with each row containing the value of `months` converted to an interval with the corresponding number of calendar months. Rows where `months` is `null`, less than `i32::MIN` or greater than `i32::MAX` will be `null`. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -description = ''' +description = """ This example uses [`add_time`](#add-time) to add the created interval to the `time` column. 
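As a worked check against the rows below: `1996-04-21` plus `months(2)` lands on `1996-06-21`, exactly two calendar months later, while a `null` `n` produces a `null` result.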
-''' -expression = 'Input.time | add_time(months(Input.n))' -input_csv = ''' +""" +expression = "Input.time | add_time(months(Input.n))" +input_csv = """ time,key,n 1996-03-21T00:00:00-00:00,Ben,1 1996-04-21T00:00:00-00:00,Ryan,2 @@ -27,8 +27,8 @@ time,key,n 1996-06-21T00:00:00-00:00,Ryan, 1996-07-21T00:00:00-00:00,Ben,2 1996-08-21T00:00:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,result 1996-03-21T00:00:00.000000000,Ben,1,1996-04-21T00:00:00.000000000 1996-04-21T00:00:00.000000000,Ryan,2,1996-06-21T00:00:00.000000000 @@ -36,4 +36,4 @@ time,key,n,result 1996-06-21T00:00:00.000000000,Ryan,, 1996-07-21T00:00:00.000000000,Ben,2,1996-09-21T00:00:00.000000000 1996-08-21T00:00:00.000000000,Ben,1,1996-09-21T00:00:00.000000000 -''' +""" diff --git a/crates/sparrow-catalog/catalog/months_between.toml b/crates/sparrow-catalog/catalog/months_between.toml index 4c548d3c7..480e0a245 100644 --- a/crates/sparrow-catalog/catalog/months_between.toml +++ b/crates/sparrow-catalog/catalog/months_between.toml @@ -1,7 +1,7 @@ -name = 'months_between' -signature = 'months_between(t1: timestamp_ns, t2: timestamp_ns) -> interval_months' -short_doc = 'Returns the number of months between the first and second timestamp.' -long_doc = ''' +name = "months_between" +signature = "months_between(t1: timestamp_ns, t2: timestamp_ns) -> interval_months" +short_doc = "Returns the number of months between the first and second timestamp." +long_doc = """ ### Parameters * t1: The first timestamp * t2: The second timestamp @@ -13,17 +13,17 @@ of calendar months between the two timestamps. In rows where `t1` or `t2` are `null`, the result will be `null`. If `t1` is before `t2`, the result will be positive. If `t1` is after `t2` the result will be negative. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Months Between' -description = ''' +name = "Months Between" +description = """ Note that the expression uses `as i32` to convert the `interval_months` to the integer number of months. This discards the units. -''' -expression = 'months_between(Input.time, Input.date) as i32' -input_csv = ''' +""" +expression = "months_between(Input.time, Input.date) as i32" +input_csv = """ time,key,date 1996-03-21T00:00:00-00:00,Ben,1996-08-19T00:00:00-00:00 1996-04-21T00:00:00-00:00,Ryan,1995-07-20T00:00:00-00:00 @@ -31,8 +31,8 @@ time,key,date 1996-06-21T00:00:00-00:00,Ryan,1996-08-19T05:00:00-00:00 1996-07-21T00:00:00-00:00,Ben, 1996-08-21T00:00:00-00:00,Ben,1996-08-22T00:00:00-00:00 -''' -output_csv = ''' +""" +output_csv = """ time,key,date,result 1996-03-21T00:00:00.000000000,Ben,1996-08-19T00:00:00.000000000,5 1996-04-21T00:00:00.000000000,Ryan,1995-07-20T00:00:00.000000000,-9 @@ -40,4 +40,4 @@ time,key,date,result 1996-06-21T00:00:00.000000000,Ryan,1996-08-19T05:00:00.000000000,2 1996-07-21T00:00:00.000000000,Ben,, 1996-08-21T00:00:00.000000000,Ben,1996-08-22T00:00:00.000000000,0 -''' +""" diff --git a/crates/sparrow-catalog/catalog/mul.toml b/crates/sparrow-catalog/catalog/mul.toml index a965d2dda..d10a74fc1 100644 --- a/crates/sparrow-catalog/catalog/mul.toml +++ b/crates/sparrow-catalog/catalog/mul.toml @@ -1,8 +1,8 @@ -name = 'mul' -signature = 'mul(a: number, b: number) -> number' -operator = 'a * b' -short_doc = 'Returns the product of two numbers.' -long_doc = ''' +name = "mul" +signature = "mul(a: number, b: number) -> number" +operator = "a * b" +short_doc = "Returns the product of two numbers." +long_doc = """ This is the function used for the binary operation `a * b`. 
### Parameters @@ -16,23 +16,23 @@ following the [numeric type coercion rules](docs:data-model#numeric-type-coercio Returns a numeric column of the promoted numeric type compatible with both `a` and `b`. The result contains `null` if `a` or `b` was null at that row. Otherwise the row contains the product of `a` and `b`. -''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Multiplication' -expression = 'Input.a * Input.b' -input_csv = ''' +name = "Multiplication" +expression = "Input.a * Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,5.7,1.2 2021-01-01T00:00:00.000000000Z,A,6.3,0.4 2021-01-01T00:00:00.000000000Z,B,,3.7 2021-01-01T00:00:00.000000000Z,A,13.2, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,A,5.7,1.2,6.84 2021-01-01T00:00:00.000000000,A,6.3,0.4,2.52 2021-01-01T00:00:00.000000000,B,,3.7, 2021-01-01T00:00:00.000000000,A,13.2,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/neg.toml b/crates/sparrow-catalog/catalog/neg.toml index 7c336d268..cbbcab034 100644 --- a/crates/sparrow-catalog/catalog/neg.toml +++ b/crates/sparrow-catalog/catalog/neg.toml @@ -1,8 +1,8 @@ -name = 'neg' -signature = 'neg(n: signed) -> signed' -operator = '-n' -short_doc = 'Returns the negation of `n`.' -long_doc = ''' +name = "neg" +signature = "neg(n: signed) -> signed" +operator = "-n" +short_doc = "Returns the negation of `n`." +long_doc = """ This is the function used for the unary operation `-n`. ### Parameters @@ -17,25 +17,25 @@ signed integer type. If it is `u64` it is promoted to `f64`. ### Results For each row in the input, returns `null` if `n` is `null`. Otherwise, returns the negation of `n`. -''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Negation' -expression = '-Input.a' -input_csv = ''' +name = "Negation" +expression = "-Input.a" +input_csv = """ time,key,a 2021-01-01T00:00:00.000000000Z,A,5.7 2021-01-01T00:00:00.000000000Z,A,6.3 2021-01-02T00:00:00.000000000Z,B, 2021-01-02T00:00:00.000000000Z,B,-2.2 2021-01-03T00:00:00.000000000Z,B,0 -''' -output_csv = ''' +""" +output_csv = """ time,key,a,result 2021-01-01T00:00:00.000000000,A,5.7,-5.7 2021-01-01T00:00:00.000000000,A,6.3,-6.3 2021-01-02T00:00:00.000000000,B,, 2021-01-02T00:00:00.000000000,B,-2.2,2.2 2021-01-03T00:00:00.000000000,B,0.0,0.0 -''' +""" diff --git a/crates/sparrow-catalog/catalog/neq.toml b/crates/sparrow-catalog/catalog/neq.toml index 074640630..399484378 100644 --- a/crates/sparrow-catalog/catalog/neq.toml +++ b/crates/sparrow-catalog/catalog/neq.toml @@ -1,8 +1,8 @@ -name = 'neq' -signature = 'neq(a: any, b: any) -> bool' -operator = 'a != b' -short_doc = 'Return `true` if `a` is not equal to `b`.' -long_doc = ''' +name = "neq" +signature = "neq(a: any, b: any) -> bool" +operator = "a != b" +short_doc = "Return `true` if `a` is not equal to `b`." +long_doc = """ This is the function used for the binary comparison `a != b`. ### Parameters @@ -17,13 +17,13 @@ they may be promoted to a compatible numeric type following the Returns a `bool` column indicating the results. For each row, it contains `null` if `a` or `b` are `null`, `true` if they are not equal and `false` if they are equal. 
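In particular, comparing a `null` value with a non-`null` value yields `null` rather than `true`; see the `null != 70.0` row in the example below.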
-''' -tags = ['comparison'] +""" +tags = ["comparison"] [[examples]] -name = 'Not Equals' -expression = 'Input.a != Input.b' -input_csv = ''' +name = "Not Equals" +expression = "Input.a != Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,Ben,50.7,6.0 2021-01-02T00:00:00.000000000Z,Ryan,,70 @@ -32,8 +32,8 @@ time,key,a,b 2021-01-05T00:00:00.000000000Z,Ben,65, 2021-01-06T00:00:00.000000000Z,Jordan,2.3,68.7 2021-01-07T00:00:00.000000000Z,Ryan,, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,Ben,50.7,6.0,true 2021-01-02T00:00:00.000000000,Ryan,,70.0, @@ -42,4 +42,4 @@ time,key,a,b,result 2021-01-05T00:00:00.000000000,Ben,65.0,, 2021-01-06T00:00:00.000000000,Jordan,2.3,68.7,true 2021-01-07T00:00:00.000000000,Ryan,,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/not.toml b/crates/sparrow-catalog/catalog/not.toml index 7d6a16672..6a537fd63 100644 --- a/crates/sparrow-catalog/catalog/not.toml +++ b/crates/sparrow-catalog/catalog/not.toml @@ -1,8 +1,8 @@ -name = 'not' -signature = 'not(input: bool) -> bool' -operator = '!input' -short_doc = 'Returns the logical negation of a boolean.' -long_doc = ''' +name = "not" +signature = "not(input: bool) -> bool" +operator = "!input" +short_doc = "Returns the logical negation of a boolean." +long_doc = """ This is the function used for the unary operation `!input`. ### Parameters @@ -11,12 +11,12 @@ This is the function used for the unary operation `!input`. ### Results For each row, return `true` if `input` is `false`, `false` if `input` is `true` and `null` if `input` is `null`. -''' -tags = ['logical'] +""" +tags = ["logical"] [[examples]] -expression = '!Input.a' -input_csv = ''' +expression = "!Input.a" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,true,false 2021-01-02T00:00:00.000000000Z,B,true,true @@ -25,8 +25,8 @@ time,key,a,b 2021-02-01T00:00:00.000000000Z,A,,true 2021-02-02T00:00:00.000000000Z,B,true, 2021-03-01T00:00:00.000000000Z,A,,false -2021-03-03T00:00:00.000000000Z,B,false,''' -output_csv = ''' +2021-03-03T00:00:00.000000000Z,B,false,""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,A,true,false,false 2021-01-02T00:00:00.000000000,B,true,true,false @@ -36,4 +36,4 @@ time,key,a,b,result 2021-02-02T00:00:00.000000000,B,true,,false 2021-03-01T00:00:00.000000000,A,,false, 2021-03-03T00:00:00.000000000,B,false,,true -''' +""" diff --git a/crates/sparrow-catalog/catalog/null_if.toml b/crates/sparrow-catalog/catalog/null_if.toml index 6a5e75004..87ddd808c 100644 --- a/crates/sparrow-catalog/catalog/null_if.toml +++ b/crates/sparrow-catalog/catalog/null_if.toml @@ -1,8 +1,8 @@ -name = 'null_if' -signature = 'null_if(condition: bool, value: any) -> any' -short_doc = 'Return the `value` if `condition` is `false`, `null` otherwise.' -long_doc = ''' -`null_if` "nulls out" the `value` if `condition` is `true`. +name = "null_if" +signature = "null_if(condition: bool, value: any) -> any" +short_doc = "Return the `value` if `condition` is `false`, `null` otherwise." +long_doc = """ +`null_if` \"nulls out\" the `value` if `condition` is `true`. It is equivalent to `if(!condition, value)`. See also [`if`](#if). @@ -12,31 +12,31 @@ See also [`if`](#if). * condition: The condition used to determine whether to return `null`. * value: The value to return if `condition` is `false`. Note: The order of arguments is chosen to allow use with the pipe operation.
-Specifically, `value | null_if(condition)` may be used to conditionally "null-out" +Specifically, `value | null_if(condition)` may be used to conditionally \"null-out\" the value on the left-hand side. ### Results For each row, return the `value` if `condition` is `false`. Returns `null` if the `condition` is `true` or `null`. -''' -tags = ['logical'] +""" +tags = ["logical"] [[examples]] -name = 'Null If' -expression = 'Input.value | null_if(Input.condition)' -input_csv = ''' +name = "Null If" +expression = "Input.value | null_if(Input.condition)" +input_csv = """ time,key,value,condition 2021-01-01T00:00:00.000000000Z,A,57.8,false 2021-01-02T00:00:00.000000000Z,B,58.7,true 2021-01-03T00:00:00.000000000Z,A,,true 2021-01-04T00:00:00.000000000Z,A,876, 2021-01-05T00:00:00.000000000Z,A,786.0, -''' -output_csv = ''' +""" +output_csv = """ time,key,value,condition,result 2021-01-01T00:00:00.000000000,A,57.8,false,57.8 2021-01-02T00:00:00.000000000,B,58.7,true, 2021-01-03T00:00:00.000000000,A,,true, 2021-01-04T00:00:00.000000000,A,876.0,, 2021-01-05T00:00:00.000000000,A,786.0,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/powf.toml b/crates/sparrow-catalog/catalog/powf.toml index 9dfacbb8a..d23532355 100644 --- a/crates/sparrow-catalog/catalog/powf.toml +++ b/crates/sparrow-catalog/catalog/powf.toml @@ -1,7 +1,7 @@ -name = 'powf' -signature = 'powf(base: f64, power: f64) -> f64' -short_doc = 'Returns `base^power`.' -long_doc = ''' +name = "powf" +signature = "powf(base: f64, power: f64) -> f64" +short_doc = "Returns `base^power`." +long_doc = """ ### Parameters * base: The base to raise to the given power. * power: The power to raise the base to. @@ -13,23 +13,23 @@ Other numbers will be implicitly promoted. Returns a column of `f64` values. Each row contains `null` if `base` or `power` are `null`. Otherwise, the row contains the value `base ^ power`. -''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Power' -expression = 'powf(Input.a, Input.b)' -input_csv = ''' +name = "Power" +expression = "powf(Input.a, Input.b)" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,5.7,1.2 2021-01-01T00:00:00.000000000Z,A,6.3,0.4 2021-01-02T00:00:00.000000000Z,B,,3.7 2021-01-03T00:00:00.000000000Z,A,13.2, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,A,5.7,1.2,8.073276500106656 2021-01-01T00:00:00.000000000,A,6.3,0.4,2.0880275269924504 2021-01-02T00:00:00.000000000,B,,3.7, 2021-01-03T00:00:00.000000000,A,13.2,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/remove_fields.toml b/crates/sparrow-catalog/catalog/remove_fields.toml index 0940c7f60..8744fdd58 100644 --- a/crates/sparrow-catalog/catalog/remove_fields.toml +++ b/crates/sparrow-catalog/catalog/remove_fields.toml @@ -1,7 +1,7 @@ -name = 'remove_fields' -signature = 'remove_fields(record, fields: string+) -> record' -short_doc = 'Remove fields from a record.' -long_doc = ''' +name = "remove_fields" +signature = "remove_fields(record, fields: string+) -> record" +short_doc = "Remove fields from a record." +long_doc = """ Note: If more fields are being removed than retained, you can use [`select_fields`](#select_fields). @@ -17,25 +17,25 @@ syntax you must be explicit, as in the example. Returns a column containing the fields in `record` not listed in `fields`, with the corresponding values from `record`. The result is `null` in rows where `record` is `null`. 
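As a sketch of how this relates to [`select_fields`](#select_fields) (using the example record below, whose fields are `time`, `key`, `a`, `b`, and `c`), the following two expressions should produce the same record:

    Input | remove_fields($input, 'c')
    Input | select_fields($input, 'time', 'key', 'a', 'b')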
-''' -tags = ['record'] +""" +tags = ["record"] [[examples]] -name = 'Record Field Filtering' -full_expression = ''' +name = "Record Field Filtering" +full_expression = """ Input | remove_fields($input, 'c') -''' -input_csv = ''' +""" +input_csv = """ time,key,a,b,c 2021-01-01T00:00:00.000000000Z,A,5,1.2,true 2021-01-02T00:00:00.000000000Z,A,6.3,0.4,false 2021-03-01T00:00:00.000000000Z,B,,3.7,true 2021-04-10T00:00:00.000000000Z,A,13,,true -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000,A,5.0,1.2 2021-01-02T00:00:00.000000000,A,6.3,0.4 2021-03-01T00:00:00.000000000,B,,3.7 2021-04-10T00:00:00.000000000,A,13.0, -''' +""" diff --git a/crates/sparrow-catalog/catalog/round.toml b/crates/sparrow-catalog/catalog/round.toml index 5d9572cd2..7de9eb038 100644 --- a/crates/sparrow-catalog/catalog/round.toml +++ b/crates/sparrow-catalog/catalog/round.toml @@ -1,7 +1,7 @@ -name = 'round' -signature = 'round(n: number) -> number' -short_doc = 'Rounds the number to the nearest integer.' -long_doc = ''' +name = "round" +signature = "round(n: number) -> number" +short_doc = "Rounds the number to the nearest integer." +long_doc = """ See also [`ceil`](#ceil) and [`floor`](#floor). ### Parameters @@ -17,21 +17,21 @@ Otherwise, it contains the result of rounding `n` to the nearest integer. Numbers half-way between two integers are rounded away from `0`. For example, `0.5` rounds to `1.0` and `-0.5` rounds to `-1.0`. -''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Round' -expression = 'Input.a | round()' -input_csv = ''' +name = "Round" +expression = "Input.a | round()" +input_csv = """ time,key,a 2021-01-01T00:00:00.000000000Z,A,5.7 2021-01-01T00:00:00.000000000Z,A,6.3 2021-01-02T00:00:00.000000000Z,B, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,result 2021-01-01T00:00:00.000000000,A,5.7,6.0 2021-01-01T00:00:00.000000000,A,6.3,6.0 2021-01-02T00:00:00.000000000,B,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/seconds.toml b/crates/sparrow-catalog/catalog/seconds.toml index 77d2a1c68..f7486f276 100644 --- a/crates/sparrow-catalog/catalog/seconds.toml +++ b/crates/sparrow-catalog/catalog/seconds.toml @@ -1,7 +1,7 @@ -name = 'seconds' -signature = 'seconds(seconds: i64) -> duration_s' -short_doc = 'Produces a duration corresponding to the given number of seconds.' -long_doc = ''' +name = "seconds" +signature = "seconds(seconds: i64) -> duration_s" +short_doc = "Produces a duration corresponding to the given number of seconds." +long_doc = """ ### Parameters * seconds: The number of seconds to create the duration for. @@ -9,16 +9,16 @@ long_doc = ''' Returns a `duration_s` column with each row containing the value of `seconds` converted to the corresponding duration. Rows where `seconds` is `null` will be `null`. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -description = ''' +description = """ This example uses [`add_time`](#add-time) to add the created duration to the `time` column. 
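For instance, in the first row below, `1996-03-21T00:00:00` plus `seconds(1)` yields `1996-03-21T00:00:01`.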
-''' -expression = 'Input.time | add_time(seconds(Input.n))' -input_csv = ''' +""" +expression = "Input.time | add_time(seconds(Input.n))" +input_csv = """ time,key,n 1996-03-21T00:00:00-00:00,Ben,1 1996-04-21T00:00:00-00:00,Ryan,2 @@ -26,8 +26,8 @@ time,key,n 1996-06-21T00:00:00-00:00,Ryan, 1996-07-21T00:00:00-00:00,Ben,2 1996-08-21T00:00:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,result 1996-03-21T00:00:00.000000000,Ben,1,1996-03-21T00:00:01.000000000 1996-04-21T00:00:00.000000000,Ryan,2,1996-04-21T00:00:02.000000000 @@ -35,4 +35,4 @@ time,key,n,result 1996-06-21T00:00:00.000000000,Ryan,, 1996-07-21T00:00:00.000000000,Ben,2,1996-07-21T00:00:02.000000000 1996-08-21T00:00:00.000000000,Ben,1,1996-08-21T00:00:01.000000000 -''' +""" diff --git a/crates/sparrow-catalog/catalog/seconds_between.toml b/crates/sparrow-catalog/catalog/seconds_between.toml index 04e1e0fb5..89cc08ce7 100644 --- a/crates/sparrow-catalog/catalog/seconds_between.toml +++ b/crates/sparrow-catalog/catalog/seconds_between.toml @@ -1,7 +1,7 @@ -name = 'seconds_between' -signature = 'seconds_between(t1: timestamp_ns, t2: timestamp_ns) -> duration_s' -short_doc = 'Returns the number of seconds between the first and second timestamp.' -long_doc = ''' +name = "seconds_between" +signature = "seconds_between(t1: timestamp_ns, t2: timestamp_ns) -> duration_s" +short_doc = "Returns the number of seconds between the first and second timestamp." +long_doc = """ ### Parameters * t1: The first timestamp * t2: The second timestamp @@ -13,17 +13,17 @@ between the two timestamps. In rows where `t1` or `t2` are `null`, the result will be `null`. If `t1` is before `t2`, the result will be positive. If `t1` is after `t2`, the result will be negative. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Seconds Between' -description = ''' +name = "Seconds Between" +description = """ Note that the expression uses `as i64` to convert the `duration_s` to the integer number of seconds. This discards the units. -''' -expression = 'seconds_between(Input.time, Input.date) as i64' -input_csv = ''' +""" +expression = "seconds_between(Input.time, Input.date) as i64" +input_csv = """ time,key,date 1996-03-21T00:00:00-00:00,Ben,1996-08-19T00:00:00-00:00 1996-04-21T00:00:00-00:00,Ryan,1995-07-20T00:00:00-00:00 @@ -31,8 +31,8 @@ time,key,date 1996-06-21T00:00:00-00:00,Ryan,1996-08-19T05:00:00-00:00 1996-07-21T00:00:00-00:00,Ben, 1996-08-21T00:00:00-00:00,Ben,1996-08-22T00:00:00-00:00 -''' -output_csv = ''' +""" +output_csv = """ time,key,date,result 1996-03-21T00:00:00.000000000,Ben,1996-08-19T00:00:00.000000000,13046400 1996-04-21T00:00:00.000000000,Ryan,1995-07-20T00:00:00.000000000,-23846400 @@ -40,4 +40,4 @@ time,key,date,result 1996-06-21T00:00:00.000000000,Ryan,1996-08-19T05:00:00.000000000,5115600 1996-07-21T00:00:00.000000000,Ben,, 1996-08-21T00:00:00.000000000,Ben,1996-08-22T00:00:00.000000000,86400 -''' +""" diff --git a/crates/sparrow-catalog/catalog/select_fields.toml b/crates/sparrow-catalog/catalog/select_fields.toml index 3611a57c8..3eb160f67 100644 --- a/crates/sparrow-catalog/catalog/select_fields.toml +++ b/crates/sparrow-catalog/catalog/select_fields.toml @@ -1,7 +1,7 @@ -name = 'select_fields' -signature = 'select_fields(record, fields: string+) -> record' -short_doc = 'Limits fields in a record to a given set.' -long_doc = ''' +name = "select_fields" +signature = "select_fields(record, fields: string+) -> record" +short_doc = "Limits fields in a record to a given set." 
+long_doc = """ Note: If more fields are being selected than removed, you can use [`remove_fields`](#remove_fields). @@ -17,25 +17,25 @@ syntax you must be explicit, as in the example. Returns a column containing the record fields listed in `fields` with the corresponding values from `record`. The result is `null` in rows where `record` is `null`. -''' -tags = ['record'] +""" +tags = ["record"] [[examples]] -name = 'Record Field Selection' -full_expression = ''' +name = "Record Field Selection" +full_expression = """ Input | select_fields($input, 'key', 'a', 'b') -''' -input_csv = ''' +""" +input_csv = """ time,key,a,b,c 2021-01-01T00:00:00.000000000Z,A,5,1.2,true 2021-01-02T00:00:00.000000000Z,A,6.3,0.4,false 2021-03-01T00:00:00.000000000Z,B,,3.7,true 2021-04-10T00:00:00.000000000Z,A,13,,true -''' -output_csv = ''' +""" +output_csv = """ key,a,b A,5.0,1.2 A,6.3,0.4 B,,3.7 A,13.0, -''' +""" diff --git a/crates/sparrow-catalog/catalog/shift_by.toml b/crates/sparrow-catalog/catalog/shift_by.toml index 178b18f5b..74da858b2 100644 --- a/crates/sparrow-catalog/catalog/shift_by.toml +++ b/crates/sparrow-catalog/catalog/shift_by.toml @@ -1,7 +1,7 @@ -name = 'shift_by' -signature = 'shift_by(delta: timedelta, value: any) -> any' -short_doc = 'Produces the current `value` shifted forward by the given `delta`.' -long_doc = ''' +name = "shift_by" +signature = "shift_by(delta: timedelta, value: any) -> any" +short_doc = "Produces the current `value` shifted forward by the given `delta`." +long_doc = """ ### Parameters * delta: The time delta to shift the value by. See other [time functions](#time-functions) for how to create `timedelta`s. @@ -15,16 +15,16 @@ If multiple values for the same entity key are shifted to the same time, all of them will be emitted in the order they originally appeared. New `subsort` IDs will be assigned to each row. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Shift By' -description = ''' +name = "Shift By" +description = """ This example uses `shift_by` to shift values from `Input` forward by 1 month. -''' -full_expression = 'Input | shift_by(months(1))' -input_csv = ''' +""" +full_expression = "Input | shift_by(months(1))" +input_csv = """ time,key,date,n 1996-03-21T00:00:00-00:00,Ben,1996-08-19T00:00:00-00:00,1 1996-04-21T00:00:00-00:00,Ryan,1996-07-20T00:00:00-00:00,2 @@ -32,8 +32,8 @@ time,key,date,n 1996-06-21T00:00:00-00:00,Ryan,1996-05-22T00:00:00-00:00,4 1996-07-21T00:00:00-00:00,Ben,1996-07-22T00:00:00-00:00,5 1996-08-21T00:00:00-00:00,Ben,1996-08-22T00:00:00-00:00,6 -''' -output_csv = ''' +""" +output_csv = """ time,key,date,n 1996-03-21T00:00:00.000000000,Ben,1996-08-19T00:00:00.000000000,1 1996-04-21T00:00:00.000000000,Ryan,1996-07-20T00:00:00.000000000,2 @@ -41,4 +41,4 @@ time,key,date,n 1996-06-21T00:00:00.000000000,Ryan,1996-05-22T00:00:00.000000000,4 1996-07-21T00:00:00.000000000,Ben,1996-07-22T00:00:00.000000000,5 1996-08-21T00:00:00.000000000,Ben,1996-08-22T00:00:00.000000000,6 -''' +""" diff --git a/crates/sparrow-catalog/catalog/shift_to.toml b/crates/sparrow-catalog/catalog/shift_to.toml index 6a9c06d83..2605400c0 100644 --- a/crates/sparrow-catalog/catalog/shift_to.toml +++ b/crates/sparrow-catalog/catalog/shift_to.toml @@ -1,7 +1,7 @@ -name = 'shift_to' -signature = 'shift_to(time: timestamp_ns, value: any) -> any' -short_doc = 'Produces the current `value` shifted forward to the given `time`.' 
-long_doc = ''' +name = "shift_to" +signature = "shift_to(time: timestamp_ns, value: any) -> any" +short_doc = "Produces the current `value` shifted forward to the given `time`." +long_doc = """ ### Parameters * time: Column containing the times to shift values to. * value: The values to be shifted. @@ -15,21 +15,21 @@ If multiple values for the same entity key are shifted to the same time, all of them will be emitted in the order they originally appeared. New `subsort` IDs will be assigned to each row. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Shift To' -description = ''' +name = "Shift To" +description = """ This example uses `shift_to` to shift values from `Input` forward to the `date` field. The order of rows (shown in field `n`) changes based on the order of `date`. Since the row containing `n = 4` has a `date` less than the `time`, it is dropped. The rows with `n = 3` and `n = 5` had the same `date`. We see that they have both been shifted to the same time, and the original order preserved within that time. -''' -full_expression = 'Input | shift_to(Input.date)' -input_csv = ''' +""" +full_expression = "Input | shift_to(Input.date)" +input_csv = """ time,key,date,n 1996-03-21T00:00:00-00:00,Ben,1996-08-19T00:00:00-00:00,1 1996-04-21T00:00:00-00:00,Ryan,1996-07-20T00:00:00-00:00,2 @@ -37,12 +37,12 @@ time,key,date,n 1996-06-21T00:00:00-00:00,Ryan,1996-05-22T00:00:00-00:00,4 1996-07-21T00:00:00-00:00,Ben,1996-07-22T00:00:00-00:00,5 1996-08-21T00:00:00-00:00,Ben,1996-08-22T00:00:00-00:00,6 -''' -output_csv = ''' +""" +output_csv = """ time,key,date,n 1996-04-21T00:00:00.000000000,Ryan,1996-07-20T00:00:00.000000000,2 1996-05-21T00:00:00.000000000,Ryan,1996-07-22T00:00:00.000000000,3 1996-07-21T00:00:00.000000000,Ben,1996-07-22T00:00:00.000000000,5 1996-03-21T00:00:00.000000000,Ben,1996-08-19T00:00:00.000000000,1 1996-08-21T00:00:00.000000000,Ben,1996-08-22T00:00:00.000000000,6 -''' +""" diff --git a/crates/sparrow-catalog/catalog/shift_until.toml b/crates/sparrow-catalog/catalog/shift_until.toml index b9aac565c..5efc473ba 100644 --- a/crates/sparrow-catalog/catalog/shift_until.toml +++ b/crates/sparrow-catalog/catalog/shift_until.toml @@ -1,7 +1,7 @@ -name = 'shift_until' -signature = 'shift_until(predicate: bool, value: any) -> any' -short_doc = 'Produces the `value` shifted forward to the time the `predicate` is true.' -long_doc = ''' +name = "shift_until" +signature = "shift_until(predicate: bool, value: any) -> any" +short_doc = "Produces the `value` shifted forward to the time the `predicate` is true." +long_doc = """ ### Parameters * predicate: The predicate to determine whether to emit shifted rows. * value: The value to shift until the `predicate` is true. @@ -14,19 +14,19 @@ If multiple values for the same entity are shifted to the same time, all of them New `subsort` IDs will be assigned to each row. A value may be produced at the same time it occurs if the `predicate` evaluates to true at that time. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Shift Until' -description = ''' +name = "Shift Until" +description = """ This example uses `shift_until` to shift values from `Input` forward until the condition is true. We see that the rows are output in the original order (seen by looking at the `n` column). Rows where the `condition` is `true` cause rows to be output at that time, including any preceding (but not yet output) rows.
Also note that the final row (with `n = 7`) has not yet been output, since the condition has not been `true` after it (yet). -''' -full_expression = 'Input | shift_until(Input.condition)' -input_csv = ''' +""" +full_expression = "Input | shift_until(Input.condition)" +input_csv = """ time,key,condition,n 1996-03-21T00:00:00-00:00,Ben,true,1 1996-04-21T00:00:00-00:00,Ryan,false,2 @@ -35,8 +35,8 @@ time,key,condition,n 1996-07-21T00:00:00-00:00,Ben,,5 1996-08-21T00:00:00-00:00,Ben,true,6 1996-06-21T00:00:00-00:00,Ryan,false,7 -''' -output_csv = ''' +""" +output_csv = """ time,key,condition,n 1996-03-21T00:00:00.000000000,Ben,true,1 1996-04-21T00:00:00.000000000,Ryan,false,2 @@ -44,4 +44,4 @@ time,key,condition,n 1996-06-21T00:00:00.000000000,Ryan,true,4 1996-07-21T00:00:00.000000000,Ben,,5 1996-08-21T00:00:00.000000000,Ben,true,6 -''' +""" diff --git a/crates/sparrow-catalog/catalog/since.toml b/crates/sparrow-catalog/catalog/since.toml index 26d31dd86..58570dbbf 100644 --- a/crates/sparrow-catalog/catalog/since.toml +++ b/crates/sparrow-catalog/catalog/since.toml @@ -1,7 +1,7 @@ -name = 'since' -signature = 'since(condition: bool) -> window' -short_doc = 'Configures a windowed aggregation.' -long_doc = ''' +name = "since" +signature = "since(condition: bool) -> window" +short_doc = "Configures a windowed aggregation." +long_doc = """ Configures aggregations to window since the last time the `condition` was `true`. @@ -11,23 +11,23 @@ Configures aggregations to window since the last time the ### Results Returns a window behavior that can be used with an [aggregation](#aggregation-functions) to configure windowed aggregations. -''' -tags = ['window'] +""" +tags = ["window"] [[examples]] -name = 'Hourly Count' -description = ''' +name = "Hourly Count" +description = """ Produces the count since the start of the hour. NOTE: The time and key are not available on the rows created by the ticks. The expression here uses `extend`, `time_of` and `first` to compute the `time` and `key` columns for all rows. -''' -full_expression = ''' +""" +full_expression = """ { n: Input.n, result: count(Input, window = since(hourly())) } # Compute time and key for all rows, even the ticks. 
| extend({ time: time_of($input), key: first(Input.key) }) -''' -input_csv = ''' +""" +input_csv = """ time,key,n 1996-12-19T16:00:57-00:00,Ben,2 1996-12-19T16:00:58-00:00,Ryan,3 @@ -35,25 +35,25 @@ time,key,n 1996-12-19T17:03:00-00:00,Ben,9 1996-12-19T17:01:00-00:00,Ryan,8 1996-12-19T18:01:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,result 1996-12-19T16:00:57.000000000,Ben,2,1 1996-12-19T16:00:58.000000000,Ryan,3,1 1996-12-19T17:00:00.000000000,Ben,9,2 -1996-12-19T17:00:00.000000000,Ben,,2 1996-12-19T17:00:00.000000000,Ryan,,1 +1996-12-19T17:00:00.000000000,Ben,,2 1996-12-19T17:01:00.000000000,Ryan,8,1 1996-12-19T17:03:00.000000000,Ben,9,1 -1996-12-19T18:00:00.000000000,Ben,,1 1996-12-19T18:00:00.000000000,Ryan,,1 +1996-12-19T18:00:00.000000000,Ben,,1 1996-12-19T18:01:00.000000000,Ben,1,1 -''' +""" [[examples]] -name = 'Count Since Predicate' -expression = 'count(Input, window = since(Input.n > 5))' -input_csv = ''' +name = "Count Since Predicate" +expression = "count(Input, window = since(Input.n > 5))" +input_csv = """ time,key,n 1996-12-19T16:00:57-00:00,Ben,2 1996-12-19T16:00:58-00:00,Ryan,3 @@ -61,8 +61,8 @@ time,key,n 1996-12-19T17:03:00-00:00,Ben,9 1996-12-19T17:01:00-00:00,Ryan,8 1996-12-19T18:01:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,result 1996-12-19T16:00:57.000000000,Ben,2,1 1996-12-19T16:00:58.000000000,Ryan,3,1 @@ -70,4 +70,4 @@ time,key,n,result 1996-12-19T17:01:00.000000000,Ryan,8,2 1996-12-19T17:03:00.000000000,Ben,9,1 1996-12-19T18:01:00.000000000,Ben,1,1 -''' +""" diff --git a/crates/sparrow-catalog/catalog/sliding.toml b/crates/sparrow-catalog/catalog/sliding.toml index b77b7178e..1b9327af7 100644 --- a/crates/sparrow-catalog/catalog/sliding.toml +++ b/crates/sparrow-catalog/catalog/sliding.toml @@ -1,7 +1,7 @@ -name = 'sliding' -signature = 'sliding(const duration: i64, condition: bool) -> window' -short_doc = 'Configures sliding windowed aggregations.' -long_doc = ''' +name = "sliding" +signature = "sliding(const duration: i64, condition: bool) -> window" +short_doc = "Configures sliding windowed aggregations." +long_doc = """ Configures aggregations to slide over a window of inputs, where the width of the window is determined by the number of times (`duration`) the `condition` is `true`. @@ -19,62 +19,62 @@ and 9:00 PM. ### Results Returns a window behavior that can be used with an [aggregation](#aggregation-functions) to configure windowed aggregations. -''' -tags = ['window'] +""" +tags = ["window"] [[examples]] -name = 'Sliding Over 2 Days' -description = ''' +name = "Sliding Over 2 Days" +description = """ Produces the sum of `Input.n` over a window of 2 days. NOTE: The time and key are not available on the rows created by the ticks. The expression here uses `extend`, `time_of` and `first` to compute the `time` and `key` columns for all rows. -''' -full_expression = ''' +""" +full_expression = """ { n: Input.n, result: sum(Input.n, window = sliding(2, daily())) } # Compute time and key for all rows, even the ticks. 
| extend({ time: time_of($input), key: first(Input.key) }) -''' -input_csv = ''' +""" +input_csv = """ time,key,n 1996-12-19T00:00:00-00:00,Ben,1 1996-12-19T00:00:00-00:00,Ryan,2 1996-12-20T00:00:00-00:00,Ben,3 1996-12-20T01:00:00-00:00,Ben,4 1996-12-21T00:00:00-00:00,Ryan,5 -1996-12-21T00:00:00-00:00,Ben,6''' -output_csv = ''' +1996-12-21T00:00:00-00:00,Ben,6""" +output_csv = """ time,key,n,result 1996-12-19T00:00:00.000000000,Ben,1,1 1996-12-19T00:00:00.000000000,Ryan,2,2 -1996-12-19T00:00:00.000000000,Ben,,1 1996-12-19T00:00:00.000000000,Ryan,,2 +1996-12-19T00:00:00.000000000,Ben,,1 1996-12-20T00:00:00.000000000,Ben,3,4 -1996-12-20T00:00:00.000000000,Ben,,4 1996-12-20T00:00:00.000000000,Ryan,,2 +1996-12-20T00:00:00.000000000,Ben,,4 1996-12-20T01:00:00.000000000,Ben,4,7 1996-12-21T00:00:00.000000000,Ryan,5,5 1996-12-21T00:00:00.000000000,Ben,6,13 -1996-12-21T00:00:00.000000000,Ben,,13 1996-12-21T00:00:00.000000000,Ryan,,5 -''' +1996-12-21T00:00:00.000000000,Ben,,13 +""" [[examples]] -name = 'Sliding Over 3 Events' -description = ''' +name = "Sliding Over 3 Events" +description = """ In this example, the `condition` evaluates to `true` when the input is valid, meaning the width of the window is 3 `Input` rows. -''' -expression = 'mean(Input.n, window = sliding(3, is_valid(Input)))' -input_csv = ''' +""" +expression = "mean(Input.n, window = sliding(3, is_valid(Input)))" +input_csv = """ time,subsort,key,n 1996-12-19T00:00:00-00:00,0,Ben,1 1996-12-19T00:00:00-00:00,0,Ryan,2 1996-12-20T00:00:00-00:00,0,Ben,3 1996-12-20T01:00:00-00:00,0,Ben,4 1996-12-21T00:00:00-00:00,0,Ryan,5 -1996-12-21T00:00:00-00:00,0,Ben,6''' -output_csv = ''' +1996-12-21T00:00:00-00:00,0,Ben,6""" +output_csv = """ time,subsort,key,n,result 1996-12-19T00:00:00.000000000,0,Ben,1,1.0 1996-12-19T00:00:00.000000000,0,Ryan,2,2.0 @@ -82,4 +82,4 @@ time,subsort,key,n,result 1996-12-20T01:00:00.000000000,0,Ben,4,2.6666666666666665 1996-12-21T00:00:00.000000000,0,Ryan,5,3.5 1996-12-21T00:00:00.000000000,0,Ben,6,4.333333333333333 -''' +""" diff --git a/crates/sparrow-catalog/catalog/sqrt.toml b/crates/sparrow-catalog/catalog/sqrt.toml index 0352874d7..e03f0ccfd 100644 --- a/crates/sparrow-catalog/catalog/sqrt.toml +++ b/crates/sparrow-catalog/catalog/sqrt.toml @@ -1,7 +1,7 @@ -name = 'sqrt' -signature = 'sqrt(a: number) -> f64' -short_doc = 'Returns the square root of `a`.' -long_doc = ''' +name = "sqrt" +signature = "sqrt(a: number) -> f64" +short_doc = "Returns the square root of `a`." +long_doc = """ ### Parameters * a: The number to take the square root of. @@ -9,21 +9,21 @@ long_doc = ''' Returns a column of type `f64`. The result contains `null` if `a` was null at that row. Otherwise the row contains the square root of `a`. 
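For example, `sqrt(5.7)` is approximately `2.3874672772626644`, as the first result row below shows.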
-''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Square Root' -expression = 'sqrt(Input.a)' -input_csv = ''' +name = "Square Root" +expression = "sqrt(Input.a)" +input_csv = """ time,key,a 2021-01-01T00:00:00.000000000Z,A,5.7 2021-01-01T00:00:00.000000000Z,A,6.3 2021-01-02T00:00:00.000000000Z,B, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,result 2021-01-01T00:00:00.000000000,A,5.7,2.3874672772626644 2021-01-01T00:00:00.000000000,A,6.3,2.5099800796022267 2021-01-02T00:00:00.000000000,B,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/stddev.toml b/crates/sparrow-catalog/catalog/stddev.toml index f2077f6fd..32c58f363 100644 --- a/crates/sparrow-catalog/catalog/stddev.toml +++ b/crates/sparrow-catalog/catalog/stddev.toml @@ -1,7 +1,7 @@ -name = 'stddev' -signature = 'stddev(input: number, window: window = null) -> f64' -short_doc = 'Computes the sample standard deviation of values across the input.' -long_doc = ''' +name = "stddev" +signature = "stddev(input: number, window: window = null) -> f64" +short_doc = "Computes the sample standard deviation of values across the input." +long_doc = """ Computes the sample standard deviation, which is the square root of the [sample variance](#variance). @@ -16,16 +16,16 @@ See [window functions](#window-functions) for how to specify the aggregation win For each input row, return the sample standard deviation of new, non-`null` rows in `input` up to and including the input row for the given entity. Returns `null` until there have been at least two such inputs. -''' +""" tags = [ - 'aggregation', - 'math', + "aggregation", + "math", ] [[examples]] -name = 'Standard Deviation' -expression = 'stddev(Input.value)' -input_csv = ''' +name = "Standard Deviation" +expression = "stddev(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,50.7 2021-01-01T00:00:00.000000000Z,Ryan, @@ -33,8 +33,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,1.2 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,50.7, 2021-01-01T00:00:00.000000000,Ryan,, @@ -42,4 +42,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,1.2,24.750000000000004 2021-01-04T00:00:00.000000000,Ben,,24.750000000000004 2021-01-04T00:00:00.000000000,Ryan,2.3,32.45 -''' +""" diff --git a/crates/sparrow-catalog/catalog/sub.toml b/crates/sparrow-catalog/catalog/sub.toml index 176abf958..c60f03d16 100644 --- a/crates/sparrow-catalog/catalog/sub.toml +++ b/crates/sparrow-catalog/catalog/sub.toml @@ -1,8 +1,8 @@ -name = 'sub' -signature = 'sub(a: number, b: number) -> number' -operator = 'a - b' -short_doc = 'Returns the difference of two numbers.' -long_doc = ''' +name = "sub" +signature = "sub(a: number, b: number) -> number" +operator = "a - b" +short_doc = "Returns the difference of two numbers." +long_doc = """ This is the function used for the binary operation `a - b`. ### Parameters @@ -16,25 +16,25 @@ following the [numeric type coercion rules](docs:data-model#numeric-type-coercio Returns a numeric column of the promoted numeric type compatible with both `a` and `b`. The result contains `null` if `a` or `b` was null at that row. Otherwise the row contains the difference of `a` and `b`.
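Note that the subtraction is exact IEEE-754 `f64` arithmetic rather than decimal; in the example below, `6.3 - 0.4` surfaces as `5.8999999999999995`.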
-''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Subtraction' -expression = 'Input.a - Input.b' -input_csv = ''' +name = "Subtraction" +expression = "Input.a - Input.b" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,5.7,1.2 2021-01-02T00:00:00.000000000Z,A,6.3,0.4 2021-01-03T00:00:00.000000000Z,B,,3.7 2021-01-03T00:00:00.000000000Z,A,13.2, 2021-01-04T00:00:00.000000000Z,A,12.2,0 -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,A,5.7,1.2,4.5 2021-01-02T00:00:00.000000000,A,6.3,0.4,5.8999999999999995 2021-01-03T00:00:00.000000000,B,,3.7, 2021-01-03T00:00:00.000000000,A,13.2,, 2021-01-04T00:00:00.000000000,A,12.2,0.0,12.2 -''' +""" diff --git a/crates/sparrow-catalog/catalog/substring.toml b/crates/sparrow-catalog/catalog/substring.toml index e84908d35..de2cb5b10 100644 --- a/crates/sparrow-catalog/catalog/substring.toml +++ b/crates/sparrow-catalog/catalog/substring.toml @@ -1,7 +1,7 @@ -name = 'substring' -signature = 'substring(s: string, start: i64 = null, end: i64 = null) -> string' -short_doc = 'Takes a substring of the input between start and end indices.' -long_doc = ''' +name = "substring" +signature = "substring(s: string, start: i64 = null, end: i64 = null) -> string" +short_doc = "Takes a substring of the input between start and end indices." +long_doc = """ ### Parameters * s: The string to take a substring of. * start: The inclusive index to start at. `null` indicates the beginning of the @@ -15,12 +15,12 @@ starting at `start` (inclusive) up to but not including the `end`. If `s` is `null`, returns `null`. If `start > end` an empty string is returned. -''' -tags = ['string'] +""" +tags = ["string"] [[examples]] -name = 'Substring Suffix' -description = ''' +name = "Substring Suffix" +description = """ This example shows using the `substring` function to extract the last 3 characters of a string. Note that if the string is shorter than 3 characters the empty string is returned. @@ -28,9 +28,9 @@ is shorter than 3 characters the empty string is returned. Specifically, `-3` is interpreted as `len(s) - 3`, which produces a negative number for shorter strings, and is thus less than the start of the string (`0`).
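For `'hi'` that is `2 - 3 = -1`, which is why the corresponding result row below is empty.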
-''' -expression = 'Input.value | substring(start = -3)' -input_csv = ''' +""" +expression = "Input.value | substring(start = -3)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,Hello World 2021-01-02T00:00:00.000000000Z,Ryan, @@ -38,8 +38,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,Hello 2021-01-03T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,hi -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,Hello World,rld 2021-01-02T00:00:00.000000000,Ryan,, @@ -47,13 +47,13 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,Hello,llo 2021-01-03T00:00:00.000000000,Ben,, 2021-01-04T00:00:00.000000000,Ryan,hi, -''' +""" [[examples]] -name = 'Substring' -description = '' -expression = 'Input.value | substring(start = 3, end = -3)' -input_csv = ''' +name = "Substring" +description = "" +expression = "Input.value | substring(start = 3, end = -3)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,Hello World 2021-01-02T00:00:00.000000000Z,Ryan, @@ -61,8 +61,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,Hello 2021-01-03T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,hi -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,Hello World,lo Wo 2021-01-02T00:00:00.000000000,Ryan,, @@ -70,4 +70,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,Hello, 2021-01-03T00:00:00.000000000,Ben,, 2021-01-04T00:00:00.000000000,Ryan,hi, -''' +""" diff --git a/crates/sparrow-catalog/catalog/sum.toml b/crates/sparrow-catalog/catalog/sum.toml index 8ebe392c4..fe9fd0a56 100644 --- a/crates/sparrow-catalog/catalog/sum.toml +++ b/crates/sparrow-catalog/catalog/sum.toml @@ -1,7 +1,7 @@ -name = 'sum' -signature = 'sum(input: number, window: window = null) -> number' -short_doc = 'Computes the sum of values across the input.' -long_doc = ''' +name = "sum" +signature = "sum(input: number, window: window = null) -> number" +short_doc = "Computes the sum of values across the input." +long_doc = """ ### Parameters * input: The input to compute the sum of. * window: The window to aggregate within, as described in @@ -13,16 +13,16 @@ See [window functions](#window-functions) for how to specify the aggregation win For each input row, return the sum of new, non-`null` rows in `input` up to and including the input row for the given entity. Returns `null` until there has been at least one such input.
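As a worked check against the example below: Ben's running sum after the `1.2` row is `50.7 + 1.2 = 51.9`, which surfaces as `51.900000000000006` in `f64`.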
-''' +""" tags = [ - 'aggregation', - 'math', + "aggregation", + "math", ] [[examples]] -name = 'Sum' -expression = 'sum(Input.value)' -input_csv = ''' +name = "Sum" +expression = "sum(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,50.7 2021-01-01T00:00:00.000000000Z,Ryan, @@ -30,8 +30,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,1.2 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,50.7,50.7 2021-01-01T00:00:00.000000000,Ryan,, @@ -39,4 +39,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,1.2,51.900000000000006 2021-01-04T00:00:00.000000000,Ben,,51.900000000000006 2021-01-04T00:00:00.000000000,Ryan,2.3,69.5 -''' +""" diff --git a/crates/sparrow-catalog/catalog/time_of.toml b/crates/sparrow-catalog/catalog/time_of.toml index 0fa939bf5..0222b335a 100644 --- a/crates/sparrow-catalog/catalog/time_of.toml +++ b/crates/sparrow-catalog/catalog/time_of.toml @@ -1,20 +1,20 @@ -name = 'time_of' -signature = 'time_of(input: any) -> timestamp_ns' -short_doc = 'Returns the timestamp of rows in `input`.' -long_doc = ''' +name = "time_of" +signature = "time_of(input: any) -> timestamp_ns" +short_doc = "Returns the timestamp of rows in `input`." +long_doc = """ ### Parameters * input: The column to retrieve timestamps for. It may be of any type (including records). ### Results Returns a `timestamp_ns` column containing the timestamp of each row in the `input`. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Time Of Record Column' -expression = 'time_of(Input)' -input_csv = ''' +name = "Time Of Record Column" +expression = "time_of(Input)" +input_csv = """ time,key 1996-03-21T00:00:00-00:00,Ben 1996-04-21T00:00:00-00:00,Ryan @@ -22,8 +22,8 @@ time,key 1996-06-21T00:00:00-00:00,Ryan 1996-07-21T00:00:00-00:00,Ben 1996-08-21T00:00:00-00:00,Ben -''' -output_csv = ''' +""" +output_csv = """ time,key,result 1996-03-21T00:00:00.000000000,Ben,1996-03-21T00:00:00.000000000 1996-04-21T00:00:00.000000000,Ryan,1996-04-21T00:00:00.000000000 @@ -31,12 +31,12 @@ time,key,result 1996-06-21T00:00:00.000000000,Ryan,1996-06-21T00:00:00.000000000 1996-07-21T00:00:00.000000000,Ben,1996-07-21T00:00:00.000000000 1996-08-21T00:00:00.000000000,Ben,1996-08-21T00:00:00.000000000 -''' +""" [[examples]] -name = 'Time Of Integer Column' -expression = 'time_of(Input.integer)' -input_csv = ''' +name = "Time Of Integer Column" +expression = "time_of(Input.integer)" +input_csv = """ time,key,integer 1996-03-21T00:00:00-00:00,Ben,8 1996-04-21T00:00:00-00:00,Ryan,12 @@ -44,8 +44,8 @@ time,key,integer 1996-06-21T00:00:00-00:00,Ryan,37 1996-07-21T00:00:00-00:00,Ben, 1996-08-21T00:00:00-00:00,Ben,24 -''' -output_csv = ''' +""" +output_csv = """ time,key,integer,result 1996-03-21T00:00:00.000000000,Ben,8,1996-03-21T00:00:00.000000000 1996-04-21T00:00:00.000000000,Ryan,12,1996-04-21T00:00:00.000000000 @@ -53,4 +53,4 @@ time,key,integer,result 1996-06-21T00:00:00.000000000,Ryan,37,1996-06-21T00:00:00.000000000 1996-07-21T00:00:00.000000000,Ben,,1996-07-21T00:00:00.000000000 1996-08-21T00:00:00.000000000,Ben,24,1996-08-21T00:00:00.000000000 -''' +""" diff --git a/crates/sparrow-catalog/catalog/upper.toml b/crates/sparrow-catalog/catalog/upper.toml index 6c04b9b92..47de18956 100644 --- a/crates/sparrow-catalog/catalog/upper.toml +++ b/crates/sparrow-catalog/catalog/upper.toml @@ -1,7 +1,7 @@ -name = 'upper' -signature = 'upper(s: string) -> string' 
-short_doc = 'Converts the string to upper case.' -long_doc = ''' +name = "upper" +signature = "upper(s: string) -> string" +short_doc = "Converts the string to upper case." +long_doc = """ ### Parameters * s: The string to convert to upper case. @@ -9,13 +9,13 @@ long_doc = ''' Returns a `string` column with each row containing the string `s` from that row converted to all upper case. The row contains `null` if `s` is `null` in that row. -''' -tags = ['string'] +""" +tags = ["string"] [[examples]] -name = 'Upper Case' -expression = 'Input.value | upper()' -input_csv = ''' +name = "Upper Case" +expression = "Input.value | upper()" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,Hello World 2021-01-02T00:00:00.000000000Z,Ryan, @@ -23,8 +23,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,Hello 2021-01-03T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,hi -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,Hello World,HELLO WORLD 2021-01-02T00:00:00.000000000,Ryan,, @@ -32,4 +32,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,Hello,HELLO 2021-01-03T00:00:00.000000000,Ben,, 2021-01-04T00:00:00.000000000,Ryan,hi,HI -''' +""" diff --git a/crates/sparrow-catalog/catalog/variance.toml b/crates/sparrow-catalog/catalog/variance.toml index 2c11bf98a..7077e7ef1 100644 --- a/crates/sparrow-catalog/catalog/variance.toml +++ b/crates/sparrow-catalog/catalog/variance.toml @@ -1,7 +1,7 @@ -name = 'variance' -signature = 'variance(input: number, window: window = null) -> f64' -short_doc = 'Computes the sample variance of values across the input.' -long_doc = ''' +name = "variance" +signature = "variance(input: number, window: window = null) -> f64" +short_doc = "Computes the sample variance of values across the input." +long_doc = """ Computes the sample variance. This divides by the number of values minus 1, rather than the number of values (which would be the population variance). @@ -16,16 +16,16 @@ See [window functions](#window-functions) for how to specify the aggregation win For each input row, return the sample variance of new, non-`null` rows in `input` up to and including the input row for the given entity. Returns `null` until there have been at least two such inputs. -''' +""" tags = [ - 'aggregation', - 'math', + "aggregation", + "math", ] [[examples]] -name = 'Variance' -expression = 'variance(Input.value)' -input_csv = ''' +name = "Variance" +expression = "variance(Input.value)" +input_csv = """ time,key,value 2021-01-01T00:00:00.000000000Z,Ben,50.7 2021-01-01T00:00:00.000000000Z,Ryan, @@ -33,8 +33,8 @@ time,key,value 2021-01-03T00:00:00.000000000Z,Ben,1.2 2021-01-04T00:00:00.000000000Z,Ben, 2021-01-04T00:00:00.000000000Z,Ryan,2.3 -''' -output_csv = ''' +""" +output_csv = """ time,key,value,result 2021-01-01T00:00:00.000000000,Ben,50.7, 2021-01-01T00:00:00.000000000,Ryan,, @@ -42,4 +42,4 @@ time,key,value,result 2021-01-03T00:00:00.000000000,Ben,1.2,612.5625000000001 2021-01-04T00:00:00.000000000,Ben,,612.5625000000001 2021-01-04T00:00:00.000000000,Ryan,2.3,1053.0025000000003 -''' +""" diff --git a/crates/sparrow-catalog/catalog/when.toml b/crates/sparrow-catalog/catalog/when.toml index 9022ba56d..478f6b10f 100644 --- a/crates/sparrow-catalog/catalog/when.toml +++ b/crates/sparrow-catalog/catalog/when.toml @@ -1,9 +1,9 @@ -name = 'when' -signature = 'when(condition: bool, value: any) -> any' -short_doc = 'Produces the current `value` when the `condition` evaluates to `true`.'
-long_doc = ''' +name = "when" +signature = "when(condition: bool, value: any) -> any" +short_doc = "Produces the current `value` when the `condition` evaluates to `true`." +long_doc = """ Performs filtering of rows. -Unlike [`if`](#if) which just "nulls" out a value in the current row, this removes the row entirely. +Unlike [`if`](#if) which just \"nulls\" out a value in the current row, this removes the row entirely. ### Parameters * condition: Determines whether to include a given row. @@ -21,13 +21,13 @@ then this returns the latest result of the aggregation when `condition` is `true`. If the `value` is not continuous (eg., taken directly from events) then this returns the current `value` when the `condition` is `true`. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'When' -full_expression = 'Input | when(Input.condition)' -input_csv = ''' +name = "When" +full_expression = "Input | when(Input.condition)" +input_csv = """ time,key,condition 1996-03-21T00:00:00-00:00,Ben,true 1996-04-21T00:00:00-00:00,Ryan,true @@ -35,11 +35,11 @@ time,key,condition 1996-06-21T00:00:00-00:00,Ryan,true 1996-07-21T00:00:00-00:00,Ben,false 1996-08-21T00:00:00-00:00,Ben,true -''' -output_csv = ''' +""" +output_csv = """ time,key,condition 1996-03-21T00:00:00.000000000,Ben,true 1996-04-21T00:00:00.000000000,Ryan,true 1996-06-21T00:00:00.000000000,Ryan,true 1996-08-21T00:00:00.000000000,Ben,true -''' +""" diff --git a/crates/sparrow-catalog/catalog/with_key.toml b/crates/sparrow-catalog/catalog/with_key.toml index 857e87d5a..11f0e034c 100644 --- a/crates/sparrow-catalog/catalog/with_key.toml +++ b/crates/sparrow-catalog/catalog/with_key.toml @@ -1,12 +1,12 @@ -name = 'with_key' -signature = 'with_key(key: key, value: any, const grouping: string = null) -> any' -short_doc = 'Changes the grouping of the input `value`.' -experimental = ''' +name = "with_key" +signature = "with_key(key: key, value: any, const grouping: string = null) -> any" +short_doc = "Changes the grouping of the input `value`." +experimental = """ `with_key` is experimental functionality. You should expect the behavior to potentially change in the future. There may be issues when using this if multiple rows are assigned the same key. -''' -long_doc = ''' +""" +long_doc = """ ### Parameters * key: The new key to use for the grouping. * value: The value to be re-grouped. @@ -18,12 +18,12 @@ long_doc = ''' Returns a column containing the non-`null` rows of `value`. Each row occurs at the same time as in `value`. The results have been re-keyed based on the value of `key` to be part of the named `grouping`. -''' -tags = ['grouping'] +""" +tags = ["grouping"] [[examples]] -name = 'Changing Keys' -description = ''' +name = "Changing Keys" +description = """ This example starts with input grouped by the `key` column. We wish to instead compute aggregates grouped by the `other_key` column. We do this by using the `with_key` function to change the grouping. @@ -31,25 +31,25 @@ We use `other_key` as the name of the grouping so that this table is compatible After we have regrouped we compute the `sum`, which we see is grouped by the `other_key`. The `extend` function is used so that we can add fields to the regrouped record. 
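The regrouping described above can be pictured with a small stand-alone sketch (a hypothetical helper, not the engine's implementation): each row is re-keyed by `other_key`, and downstream aggregations, here a running sum, are maintained per new key.

```rust
use std::collections::HashMap;

/// Re-key rows by `other_key` and keep a running sum per new key.
/// Rows mirror the example below: `(other_key, n)` pairs in time order.
fn running_sums_by_other_key(rows: &[(&str, Option<i64>)]) -> Vec<Option<i64>> {
    let mut sums: HashMap<&str, i64> = HashMap::new();
    rows.iter()
        .map(|&(other_key, n)| {
            if let Some(n) = n {
                *sums.entry(other_key).or_insert(0) += n;
            }
            sums.get(other_key).copied()
        })
        .collect()
}

// running_sums_by_other_key(&[("X", Some(5)), ("Y", Some(8)), ("X", Some(9))])
// yields [Some(5), Some(8), Some(14)], matching the 5, 8, 14 results below.
```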
-''' +""" -full_expression = ''' +full_expression = """ Input | with_key($input.other_key, grouping = 'other_key') | extend($input, { sum_n_by_other_key: sum($input.n) }) -''' -input_csv = ''' +""" +input_csv = """ time,key,other_key,n 2021-01-01T00:00:00.000000000Z,A,X,5 2021-01-02T00:00:00.000000000Z,A,Y,8 2021-03-01T00:00:00.000000000Z,B,X,9 2021-04-10T00:00:00.000000000Z,A,X, 2021-04-11T00:00:00.000000000Z,A,,9 -''' -output_csv = ''' +""" +output_csv = """ time,key,other_key,n,sum_n_by_other_key 2021-01-01T00:00:00.000000000,A,X,5,5 2021-01-02T00:00:00.000000000,A,Y,8,8 2021-03-01T00:00:00.000000000,B,X,9,14 2021-04-10T00:00:00.000000000,A,X,,14 2021-04-11T00:00:00.000000000,A,,9,9 -''' +""" diff --git a/crates/sparrow-catalog/catalog/year.toml b/crates/sparrow-catalog/catalog/year.toml index 92a073608..09424f8b1 100644 --- a/crates/sparrow-catalog/catalog/year.toml +++ b/crates/sparrow-catalog/catalog/year.toml @@ -1,20 +1,20 @@ -name = 'year' -signature = 'year(time: timestamp_ns) -> i32' -short_doc = 'Return the year of the given timestamp.' -long_doc = ''' +name = "year" +signature = "year(time: timestamp_ns) -> i32" +short_doc = "Return the year of the given timestamp." +long_doc = """ ### Parameters * time: The timestamp to return the year for. ### Results Returns an `i32` column containing the year for each input `time`. Returns `null` for rows where `time` is `null`. -''' -tags = ['time'] +""" +tags = ["time"] [[examples]] -name = 'Year' -expression = 'year(Input.time)' -input_csv = ''' +name = "Year" +expression = "year(Input.time)" +input_csv = """ time,key 1996-03-21T00:00:00-00:00,Ben 1997-04-21T00:00:00-00:00,Ryan @@ -22,8 +22,8 @@ time,key 2000-06-21T00:00:00-00:00,Ryan 2021-07-21T00:00:00-00:00,Ben 2022-08-21T00:00:00-00:00,Ben -''' -output_csv = ''' +""" +output_csv = """ time,key,result 1996-03-21T00:00:00.000000000,Ben,1996 1997-04-21T00:00:00.000000000,Ryan,1997 @@ -31,4 +31,4 @@ time,key,result 2000-06-21T00:00:00.000000000,Ryan,2000 2021-07-21T00:00:00.000000000,Ben,2021 2022-08-21T00:00:00.000000000,Ben,2022 -''' +""" diff --git a/crates/sparrow-catalog/catalog/yearly.toml b/crates/sparrow-catalog/catalog/yearly.toml index 8b589faa2..b6f786917 100644 --- a/crates/sparrow-catalog/catalog/yearly.toml +++ b/crates/sparrow-catalog/catalog/yearly.toml @@ -1,29 +1,29 @@ -name = 'yearly' -signature = 'yearly() -> bool' -short_doc = 'A periodic function that produces a `true` value at the start of each calendar year (UTC).' -long_doc = ''' +name = "yearly" +signature = "yearly() -> bool" +short_doc = "A periodic function that produces a `true` value at the start of each calendar year (UTC)." +long_doc = """ This function is often used in aggregations to produce windows or as a predicate column. ### Results Returns a boolean column with each row containing a `true` value at the start of each calendar year and `null` at all other times. -''' -tags = ['tick'] +""" +tags = ["tick"] [[examples]] -name = 'Yearly Aggregated Window' -description = ''' +name = "Yearly Aggregated Window" +description = """ In this example, the `yearly()` function is used as an argument to the [`since`](#since) function, which produces a window. The result is a windowed aggregation that resets at the start of each calendar year (UTC).
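A compact way to see the reset behavior is the sketch below, under simplified types with the year pre-extracted; the engine's tick-based implementation additionally emits the closing value on the January 1 tick rows visible in the example output.

```rust
/// Running sum per entity that resets when the calendar year advances.
fn yearly_windowed_sum(events: &[(i32, i64)]) -> Vec<i64> {
    let mut current_year = None;
    let mut sum = 0;
    events
        .iter()
        .map(|&(year, n)| {
            if current_year != Some(year) {
                current_year = Some(year);
                sum = 0; // the window resets at the start of each year
            }
            sum += n;
            sum
        })
        .collect()
}

// For Ben's events below, [(1996, 2), (1997, 6), (1997, 9), (1998, 1)]
// produces [2, 6, 15, 1], matching the non-tick rows of `yearly_sum`.
```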
-''' -full_expression = ''' +""" +full_expression = """ { n: Input.n, yearly_sum: sum(Input.n, window = since(yearly())) } | extend({time: time_of($input), key: first(Input.key) }) -''' -input_csv = ''' +""" +input_csv = """ time,key,n 1996-12-19T16:00:00-00:00,Ben,2 1996-12-19T16:00:00-00:00,Ryan,3 @@ -31,31 +31,31 @@ time,key,n 1997-12-20T16:01:00-00:00,Ben,9 1997-12-21T16:00:00-00:00,Ryan,8 1998-12-21T16:00:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n,yearly_sum 1996-12-19T16:00:00.000000000,Ben,2,2 1996-12-19T16:00:00.000000000,Ryan,3,3 -1997-01-01T00:00:00.000000000,Ben,,2 1997-01-01T00:00:00.000000000,Ryan,,3 +1997-01-01T00:00:00.000000000,Ben,,2 1997-12-20T16:00:00.000000000,Ben,6,6 1997-12-20T16:01:00.000000000,Ben,9,15 1997-12-21T16:00:00.000000000,Ryan,8,8 -1998-01-01T00:00:00.000000000,Ben,,15 1998-01-01T00:00:00.000000000,Ryan,,8 +1998-01-01T00:00:00.000000000,Ben,,15 1998-12-21T16:00:00.000000000,Ben,1,1 -''' +""" [[examples]] -name = 'Filter Yearly' -description = ''' +name = "Filter Yearly" +description = """ In this example, the `yearly()` function is used as an argument to the [`when`](#when) function, which filters input. The output includes the last input row before a [`tick`](#tick) occurs. -''' -full_expression = 'Input | last() | when(yearly())' -input_csv = ''' +""" +full_expression = "Input | last() | when(yearly())" +input_csv = """ time,key,n 1996-12-19T16:00:00-00:00,Ben,2 1996-12-19T16:00:00-00:00,Ryan,3 @@ -63,11 +63,11 @@ time,key,n 1997-12-20T16:01:00-00:00,Ben,9 1997-12-21T16:00:00-00:00,Ryan,8 1998-12-21T16:00:00-00:00,Ben,1 -''' -output_csv = ''' +""" +output_csv = """ time,key,n -1996-12-19T16:00:00.000000000,Ben,2 1996-12-19T16:00:00.000000000,Ryan,3 -1997-12-20T16:01:00.000000000,Ben,9 +1996-12-19T16:00:00.000000000,Ben,2 1997-12-21T16:00:00.000000000,Ryan,8 -''' +1997-12-20T16:01:00.000000000,Ben,9 +""" diff --git a/crates/sparrow-catalog/catalog/zip_max.toml b/crates/sparrow-catalog/catalog/zip_max.toml index d1cdaf8ce..fcff49ae8 100644 --- a/crates/sparrow-catalog/catalog/zip_max.toml +++ b/crates/sparrow-catalog/catalog/zip_max.toml @@ -1,7 +1,7 @@ -name = 'zip_max' -signature = 'zip_max(a: ordered, b: ordered) -> ordered' -short_doc = 'Returns the maximum of two values.' -long_doc = ''' +name = "zip_max" +signature = "zip_max(a: ordered, b: ordered) -> ordered" +short_doc = "Returns the maximum of two values." +long_doc = """ This returns the maximum of two values. See the aggregation [`max`](#max) for the maximum of values in a column up to and including the current row. @@ -16,23 +16,23 @@ Returns a numeric column of the promoted type. Each row contains the value from `a` if `a` is greater than `b`, otherwise it contains `b`. Specifically, if `a` or `b` is `NaN` then `b` will be returned. If `a` or `b` are `null`, then `b` will be returned. 
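The `NaN` and `null` rules above read a little differently from the worked example: the example rows below produce `null` whenever either side is missing. A row-wise sketch that matches those example rows (illustrative only, not the vectorized kernel):

```rust
/// Row-wise `zip_max` over nullable floats. With both sides present, the
/// comparison `a > b` is false for NaN, so `b` wins, as described above;
/// a missing side yields `null`, as in the example rows below.
fn zip_max_row(a: Option<f64>, b: Option<f64>) -> Option<f64> {
    match (a, b) {
        (Some(a), Some(b)) => Some(if a > b { a } else { b }),
        _ => None,
    }
}
```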
-''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Zip Max' -expression = 'zip_max(Input.a, Input.b)' -input_csv = ''' +name = "Zip Max" +expression = "zip_max(Input.a, Input.b)" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,5.7,1.2 2021-01-01T00:00:00.000000000Z,A,6.3,0.4 2021-01-02T00:00:00.000000000Z,B,,3.7 2021-01-03T00:00:00.000000000Z,A,13.2, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,A,5.7,1.2,5.7 2021-01-01T00:00:00.000000000,A,6.3,0.4,6.3 2021-01-02T00:00:00.000000000,B,,3.7, 2021-01-03T00:00:00.000000000,A,13.2,, -''' +""" diff --git a/crates/sparrow-catalog/catalog/zip_min.toml b/crates/sparrow-catalog/catalog/zip_min.toml index a768ffcfe..a012985e8 100644 --- a/crates/sparrow-catalog/catalog/zip_min.toml +++ b/crates/sparrow-catalog/catalog/zip_min.toml @@ -1,7 +1,7 @@ -name = 'zip_min' -signature = 'zip_min(a: ordered, b: ordered) -> ordered' -short_doc = 'Returns the minimum of two values.' -long_doc = ''' +name = "zip_min" +signature = "zip_min(a: ordered, b: ordered) -> ordered" +short_doc = "Returns the minimum of two values." +long_doc = """ This returns the minimum of two values. See the aggregation [`min`](#min) for the minimum of values in a column up to and including the current row. @@ -16,23 +16,23 @@ Returns a numeric column of the promoted type. Each row contains the value from `a` if `a` is less than `b`, otherwise it contains `b`. Specifically, if `a` or `b` is `NaN` then `b` will be returned. If `a` or `b` are `null`, then `b` will be returned. -''' -tags = ['math'] +""" +tags = ["math"] [[examples]] -name = 'Zip Min' -expression = 'zip_min(Input.a, Input.b)' -input_csv = ''' +name = "Zip Min" +expression = "zip_min(Input.a, Input.b)" +input_csv = """ time,key,a,b 2021-01-01T00:00:00.000000000Z,A,5.7,1.2 2021-01-01T00:00:00.000000000Z,A,6.3,0.4 2021-01-02T00:00:00.000000000Z,B,,3.7 2021-01-03T00:00:00.000000000Z,A,13.2, -''' -output_csv = ''' +""" +output_csv = """ time,key,a,b,result 2021-01-01T00:00:00.000000000,A,5.7,1.2,1.2 2021-01-01T00:00:00.000000000,A,6.3,0.4,0.4 2021-01-02T00:00:00.000000000,B,,3.7, 2021-01-03T00:00:00.000000000,A,13.2,, -''' +""" diff --git a/crates/sparrow-compiler/Cargo.toml b/crates/sparrow-compiler/Cargo.toml index 47ddae9b4..186a17a0a 100644 --- a/crates/sparrow-compiler/Cargo.toml +++ b/crates/sparrow-compiler/Cargo.toml @@ -13,6 +13,7 @@ Compiler from Fenl syntax to Sparrow execution plans. 
ahash.workspace = true anyhow.workspace = true arrow.workspace = true +arrow-schema.workspace = true bit-set.workspace = true chrono.workspace = true const_format.workspace = true @@ -42,7 +43,7 @@ sparrow-arrow = { path = "../sparrow-arrow" } sparrow-core = { path = "../sparrow-core" } sparrow-instructions = { path = "../sparrow-instructions" } sparrow-kernels = { path = "../sparrow-kernels" } -sparrow-plan = { path = "../sparrow-plan" } +sparrow-merge = { path = "../sparrow-merge" } sparrow-syntax = { path = "../sparrow-syntax" } static_init.workspace = true strum.workspace = true diff --git a/crates/sparrow-compiler/src/ast_to_dfg.rs b/crates/sparrow-compiler/src/ast_to_dfg.rs index 490d3d20a..8d0e7f80c 100644 --- a/crates/sparrow-compiler/src/ast_to_dfg.rs +++ b/crates/sparrow-compiler/src/ast_to_dfg.rs @@ -6,20 +6,22 @@ mod window_args; #[cfg(test)] mod tests; -use std::rc::Rc; +use std::sync::Arc; use anyhow::{anyhow, Context}; use arrow::datatypes::{DataType, FieldRef}; +use arrow_schema::Field; pub use ast_dfg::*; use egg::Id; use itertools::{izip, Itertools}; use record_ops_to_dfg::*; use smallvec::{smallvec, SmallVec}; use sparrow_arrow::scalar_value::ScalarValue; -use sparrow_instructions::CastEvaluator; -use sparrow_plan::{GroupId, InstKind, InstOp}; +use sparrow_instructions::{CastEvaluator, Udf}; +use sparrow_instructions::{GroupId, InstKind, InstOp}; use sparrow_syntax::{ - ExprOp, FenlType, FormatDataType, LiteralValue, Located, Location, Resolved, ResolvedExpr, + Collection, ExprOp, FenlType, FormatDataType, LiteralValue, Located, Location, Resolved, + ResolvedExpr, }; use self::window_args::flatten_window_args; @@ -27,7 +29,7 @@ use crate::dfg::{Dfg, Expression, Operation}; use crate::diagnostics::DiagnosticCode; use crate::time_domain::TimeDomain; use crate::types::inference::instantiate; -use crate::{DataContext, DiagnosticBuilder, DiagnosticCollector}; +use crate::{DataContext, DiagnosticBuilder, DiagnosticCollector, TimeDomainCheck}; /// Convert the `expr` to corresponding DFG nodes. pub(super) fn ast_to_dfg( @@ -48,7 +50,7 @@ pub(super) fn ast_to_dfg( // Create the DFG for each argument. This is usually straightforward, unless // the operator has bind values. In that case, we need to handle the environment // specially. 
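Throughout this change, `Rc<...>`/`RefCell<...>` give way to `Arc<...>`/`Mutex<...>`. The motivation is thread safety: `Rc` is neither `Send` nor `Sync`, so shared `AstDfg` nodes could not cross thread boundaries. A minimal illustration, independent of the compiler types:

```rust
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    // An `Rc<RefCell<u64>>` here would fail to compile: `Rc` cannot be sent
    // into the spawned thread. `Arc<Mutex<_>>` is the `Send + Sync`
    // equivalent that this patch migrates the shared nodes to.
    let shared = Arc::new(Mutex::new(0u64));
    let clone = Arc::clone(&shared);
    thread::spawn(move || {
        *clone.lock().unwrap() += 1;
    })
    .join()
    .unwrap();
    assert_eq!(*shared.lock().unwrap(), 1);
}
```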
- let mut arguments = match expr.op() { + let arguments = match expr.op() { ExprOp::Pipe(_) => { let lhs = ast_to_dfg(data_context, dfg, diagnostics, &arguments[0])?; dfg.enter_env(); @@ -82,9 +84,91 @@ pub(super) fn ast_to_dfg( Ok(e.with_value(ast_dfg)) })?, }; + + add_to_dfg( + data_context, + dfg, + diagnostics, + expr.op(), + arguments, + Some(expr), + ) +} + +pub fn add_udf_to_dfg( + location: &Located, + udf: Arc, + dfg: &mut Dfg, + arguments: Resolved>, + data_context: &mut DataContext, + diagnostics: &mut DiagnosticCollector<'_>, +) -> anyhow::Result { let argument_types = arguments.transform(|i| i.with_value(i.value_type().clone())); + let signature = udf.signature(); + + let (instantiated_types, instantiated_result_type) = + match instantiate(location, &argument_types, signature) { + Ok(result) => result, + Err(diagnostic) => { + diagnostic.emit(diagnostics); + return Ok(dfg.error_node()); + } + }; + + if argument_types.iter().any(|arg| arg.is_error()) { + return Ok(dfg.error_node()); + } + let grouping = verify_same_partitioning( + data_context, + diagnostics, + &location.with_value(location.inner().as_str()), + &arguments, + )?; + + let args: Vec<_> = izip!(arguments, instantiated_types) + .map(|(arg, expected_type)| -> anyhow::Result<_> { + let ast_dfg = Arc::new(AstDfg::new( + cast_if_needed(dfg, arg.value(), arg.value_type(), &expected_type)?, + arg.is_new(), + expected_type, + arg.grouping(), + arg.time_domain().clone(), + arg.location().clone(), + None, + )); + Ok(arg.with_value(ast_dfg)) + }) + .try_collect()?; + + let is_new = dfg.add_udf(udf.clone(), args.iter().map(|i| i.value()).collect())?; + let value = is_any_new(dfg, &args)?; + + let time_domain_check = TimeDomainCheck::Compatible; + let time_domain = + time_domain_check.check_args(location.location(), diagnostics, &args, data_context)?; + + Ok(Arc::new(AstDfg::new( + value, + is_new, + instantiated_result_type, + grouping, + time_domain, + location.location().clone(), + None, + ))) +} - match expr.op() { +pub fn add_to_dfg( + data_context: &mut DataContext, + dfg: &mut Dfg, + diagnostics: &mut DiagnosticCollector<'_>, + op: &ExprOp, + mut arguments: Resolved>, + original_ast: Option<&ResolvedExpr>, +) -> anyhow::Result { + let argument_types = arguments.transform(|i| i.with_value(i.value_type().clone())); + + match op { ExprOp::Literal(literal) => { let (value_id, value_type) = match literal.inner() { LiteralValue::String(s) => { @@ -116,7 +200,7 @@ pub(super) fn ast_to_dfg( if CastEvaluator::is_supported_fenl(from_type, to_type) { if let FenlType::Concrete(to_type) = to_type.inner() { - return Ok(Rc::new(AstDfg::new( + return Ok(Arc::new(AstDfg::new( dfg.add_expression( Expression::Inst(InstKind::Cast(to_type.clone())), smallvec![input.value()], @@ -158,12 +242,7 @@ pub(super) fn ast_to_dfg( .with_note(if nearest.is_empty() { "No formulas, tables, or let-bound names available".to_owned() } else { - format!( - "Nearest matches: {}", - nearest - .iter() - .format_with(", ", |e, f| f(&format_args!("'{e}'"))) - ) + format!("Nearest matches: {nearest}") }) .emit(diagnostics); Ok(dfg.error_node()) @@ -173,41 +252,14 @@ pub(super) fn ast_to_dfg( let base = &arguments[0]; let base_type = &argument_types[0]; - let field_type = match base_type.inner() { - FenlType::Concrete(DataType::Struct(fields)) => { - if let Some(field) = fields.iter().find(|f| f.name() == field.inner()) { - field.data_type() - } else { - missing_field_diagnostic(fields, field.inner(), field.location()) - .emit(diagnostics); - return 
Ok(dfg.error_node()); - } - } - FenlType::Json => { - // This is a pseudo-hack that allows us to support json datatypes without - // a specific arrow-representable json type. All `json` functions are converted - // to `json_field` instructions that take a `string` and output a `string`, - // hence the `utf8` return type here. - &DataType::Utf8 - } - FenlType::Error => { - // The original error is already reported. + let field_type = match field_type(field, base_type.inner(), arguments[0].location()) { + Ok(Some(field_type)) => field_type, + Ok(None) => { + // already reported error. return Ok(dfg.error_node()); } - _ => { - DiagnosticCode::IllegalFieldRef - .builder() - .with_label( - // If the base is not a struct, that is the "primary" problem. - expr.arg(0) - .unwrap() - .location() - .primary_label() - .with_message(format!( - "No fields for non-record base type {base_type}" - )), - ) - .emit(diagnostics); + Err(diagnostic) => { + diagnostic.emit(diagnostics); return Ok(dfg.error_node()); } }; @@ -218,8 +270,8 @@ pub(super) fn ast_to_dfg( smallvec![base.value(), field_name], )?; let is_new = base.is_new(); - let value_type = field_type.clone().into(); - Ok(Rc::new(AstDfg::new( + let value_type = field_type.into(); + Ok(Arc::new(AstDfg::new( value, is_new, value_type, @@ -257,7 +309,7 @@ pub(super) fn ast_to_dfg( let agg_input_op = dfg.operation(agg_input.value()); let tick_input = smallvec![agg_input_op]; let tick_node = dfg.add_operation(Operation::Tick(behavior), tick_input)?; - let tick_node = Rc::new(AstDfg::new( + let tick_node = Arc::new(AstDfg::new( tick_node, tick_node, FenlType::Concrete(DataType::Boolean), @@ -270,13 +322,23 @@ pub(super) fn ast_to_dfg( } } + let signature = if original_ast.is_some() { + // If we have an original AST, then we're running from a Fenl file. + // In that case, we use the AST signature. + function.signature() + } else { + // If not, we're running from the builder, and can use the internal + // DFG signature. + function.internal_signature() + }; + let mut invalid = false; - for constant_index in function.signature().parameters().constant_indices() { + for constant_index in signature.parameters().constant_indices() { let argument = &arguments.values()[constant_index]; if dfg.literal(argument.value()).is_none() { invalid = true; - let argument_name = &function.signature().arg_names()[constant_index]; + let argument_name = &signature.arg_names()[constant_index]; DiagnosticCode::InvalidNonConstArgument .builder() .with_label(argument.location().primary_label().with_message(format!( @@ -291,7 +353,7 @@ pub(super) fn ast_to_dfg( } let (instantiated_types, instantiated_result_type) = - match instantiate(function_name, &argument_types, function.signature()) { + match instantiate(function_name, &argument_types, signature) { Ok(result) => result, Err(diagnostic) => { diagnostic.emit(diagnostics); @@ -490,7 +552,7 @@ pub(super) fn ast_to_dfg( // Add cast operations as necessary let args: Vec<_> = izip!(arguments, instantiated_types) .map(|(arg, expected_type)| -> anyhow::Result<_> { - let ast_dfg = Rc::new(AstDfg::new( + let ast_dfg = Arc::new(AstDfg::new( cast_if_needed(dfg, arg.value(), arg.value_type(), &expected_type)?, arg.is_new(), expected_type, @@ -504,44 +566,106 @@ pub(super) fn ast_to_dfg( .try_collect()?; let args: Vec<_> = if function.is_aggregation() { - // If the function is an aggregation, we may need to flatten the window. 
- dfg.enter_env(); - dfg.bind("$condition_input", args[0].inner().clone()); - - let window = &expr.args()[1]; - let (condition, duration) = match window.op() { - ExprOp::Call(window_name) => { - flatten_window_args(window_name, window, dfg, data_context, diagnostics)? + let window_arg = original_ast.map(|e| &e.args()[1]); + let (condition, duration) = match window_arg { + Some(window) => { + // If the function is an aggregation, we may need to flatten the window. + dfg.enter_env(); + dfg.bind("$condition_input", args[0].inner().clone()); + + let result = + flatten_window_args_if_needed(window, dfg, data_context, diagnostics)?; + dfg.exit_env(); + result } - ExprOp::Literal(v) if v.inner() == &LiteralValue::Null => { - // Unwindowed aggregations just use nulls - let null_arg = dfg.add_literal(LiteralValue::Null.to_scalar()?)?; - let null_arg = Located::new( - add_literal( - dfg, - null_arg, - FenlType::Concrete(DataType::Null), - window.location().clone(), - )?, - window.location().clone(), - ); - - (null_arg.clone(), null_arg) + None => { + // If `expr` is None, we're running the Python builder code, + // which already flattened things. + // + // Note that this won't define the `condition_input` for the + // purposes of ticks. + (args[1].clone(), args[2].clone()) } - unexpected => anyhow::bail!("expected window, found {:?}", unexpected), }; - dfg.exit_env(); // [agg_input, condition, duration] vec![args[0].clone(), condition, duration] + } else if function.name() == "collect" { + // The collect function contains a window, but does not follow the same signature + // pattern as aggregations, so it requires a different flattening strategy. + // + // TODO: Flattening the window arguments is hacky and confusing. We should instead + // incorporate the tick directly into the function containing the window. + let window_arg = original_ast.map(|e| &e.args()[3]); + let (condition, duration) = match window_arg { + Some(window) => { + dfg.enter_env(); + dfg.bind("$condition_input", args[0].inner().clone()); + + let result = + flatten_window_args_if_needed(window, dfg, data_context, diagnostics)?; + dfg.exit_env(); + result + } + None => { + // If `expr` is None, we're running the Python builder code, + // which already flattened things. + // + // Note that this won't define the `condition_input` for the + // purposes of ticks. 
+ (args[3].clone(), args[4].clone()) + } + }; + + let min = dfg.literal(args[2].value()); + let max = dfg.literal(args[1].value()); + match (min, max) { + (Some(ScalarValue::Int64(Some(min))), Some(ScalarValue::Int64(Some(max)))) => { + if min > max { + DiagnosticCode::IllegalCast + .builder() + .with_label(args[2].location().primary_label().with_message( + format!( + "min '{min}' must be less than or equal to max '{max}'" + ), + )) + .emit(diagnostics); + } + } + (Some(_), Some(_)) => (), + (_, _) => panic!("previously verified min and max are scalar types"), + } + + // [input, max, min, condition, duration] + vec![ + args[0].clone(), + args[1].clone(), + args[2].clone(), + condition, + duration, + ] } else if function.name() == "when" || function.name() == "if" { - dfg.enter_env(); - dfg.bind("$condition_input", args[1].inner().clone()); + match original_ast { + Some(original_ast) => { + dfg.enter_env(); + dfg.bind("$condition_input", args[1].inner().clone()); - let condition = ast_to_dfg(data_context, dfg, diagnostics, &expr.args()[0])?; - dfg.exit_env(); - // [condition, value] - vec![expr.args()[0].with_value(condition), args[1].clone()] + let condition = original_ast.args()[0].as_ref().try_map(|condition| { + ast_to_dfg(data_context, dfg, diagnostics, condition) + })?; + + dfg.exit_env(); + vec![condition, args[1].clone()] + } + None => { + // If `expr` is None, we're running the Python builder code, + // which already flattened things. + // + // Note that this won't define the `condition_input` for the + // purposes of ticks. + vec![args[0].clone(), args[1].clone()] + } + } } else { args }; @@ -589,6 +713,37 @@ pub(super) fn ast_to_dfg( } } +#[allow(clippy::type_complexity)] +fn flatten_window_args_if_needed( + window: &Located>, + dfg: &mut Dfg, + data_context: &mut DataContext, + diagnostics: &mut DiagnosticCollector<'_>, +) -> anyhow::Result<(Located>, Located>)> { + let (condition, duration) = match window.op() { + ExprOp::Call(window_name) => { + flatten_window_args(window_name, window, dfg, data_context, diagnostics)? + } + ExprOp::Literal(v) if v.inner() == &LiteralValue::Null => { + // Unwindowed aggregations just use nulls + let null_arg = dfg.add_literal(LiteralValue::Null.to_scalar()?)?; + let null_arg = Located::new( + add_literal( + dfg, + null_arg, + FenlType::Concrete(DataType::Null), + window.location().clone(), + )?, + window.location().clone(), + ); + + (null_arg.clone(), null_arg) + } + unexpected => anyhow::bail!("expected window, found {:?}", unexpected), + }; + Ok((condition, duration)) +} + // Verify that the arguments are compatibly partitioned. fn verify_same_partitioning( data_context: &DataContext, @@ -653,6 +808,13 @@ fn cast_if_needed( { Ok(value) } + // Ensures that list types with the same inner types are compatible, regardless of the (arbitary) field naming. + (FenlType::Concrete(DataType::List(s)), FenlType::Concrete(DataType::List(s2))) + if list_types_are_equal(s, s2) => + { + Ok(value) + } + (FenlType::Concrete(DataType::Null), FenlType::Window) => Ok(value), ( FenlType::Concrete(DataType::Struct(actual_fields)), @@ -712,6 +874,14 @@ fn map_types_are_equal(a: &FieldRef, b: &FieldRef) -> bool { } } +// When constructing the concrete list during inference, we use arbitary names for the inner data +// field since we don't have access to the user's naming patterns there. +// By comparing the list types based on just the inner type, we can ensure that the types are +// still treated as equal. 
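The comment above is easy to confirm with arrow-rs directly. A small demonstration, assuming a recent `arrow-schema` where list element types are `FieldRef`s:

```rust
use std::sync::Arc;

use arrow_schema::{DataType, Field};

fn main() {
    // Same element type, different (arbitrary) inner field names.
    let a = DataType::List(Arc::new(Field::new("item", DataType::Int64, true)));
    let b = DataType::List(Arc::new(Field::new("element", DataType::Int64, true)));

    // Full equality is sensitive to the field name...
    assert_ne!(a, b);

    // ...while comparing only the inner data types treats them as equal,
    // which is what `list_types_are_equal` below does.
    if let (DataType::List(fa), DataType::List(fb)) = (&a, &b) {
        assert_eq!(fa.data_type(), fb.data_type());
    }
}
```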
+fn list_types_are_equal(a: &FieldRef, b: &FieldRef) -> bool { + a.data_type() == b.data_type() +} + pub(crate) fn is_any_new(dfg: &mut Dfg, arguments: &[Located<AstDfgRef>]) -> anyhow::Result<Id> { let mut argument_is_new = arguments.iter().map(|a| a.is_new()).unique(); let mut result = argument_is_new @@ -730,7 +900,7 @@ fn add_literal( location: Location, ) -> anyhow::Result<AstDfgRef> { let is_new = dfg.add_literal(false)?; - Ok(Rc::new(AstDfg::new( + Ok(Arc::new(AstDfg::new( value, is_new, value_type, @@ -758,15 +928,55 @@ fn missing_field_diagnostic( .with_note(if fields.is_empty() { "No fields available on base record".to_owned() } else { - let candidates = crate::nearest_matches::nearest_matches( + let candidates = crate::nearest_matches::NearestMatches::new_nearest_strs( field_name, - fields.iter().map(|f| f.name()), + fields.iter().map(|f| f.name().as_str()), ); - format!( - "Nearest fields: {}", - candidates - .iter() - .format_with(", ", |name, f| f(&format_args!("'{name}'"))) - ) + format!("Nearest fields: {candidates}",) }) } + +fn field_type( + field: &Located<String>, + base_type: &FenlType, + base_location: &Location, +) -> Result<Option<DataType>, DiagnosticBuilder> { + match base_type { + FenlType::Concrete(DataType::Struct(fields)) => { + if let Some(field) = fields.iter().find(|f| f.name() == field.inner()) { + Ok(Some(field.data_type().clone())) + } else { + Err(missing_field_diagnostic( + fields, + field.inner(), + field.location(), + )) + } + } + FenlType::Json => { + // This is a pseudo-hack that allows us to support json datatypes without + // a specific arrow-representable json type. All `json` functions are converted + // to `json_field` instructions that take a `string` and output a `string`, + // hence the `utf8` return type here. + Ok(Some(DataType::Utf8)) + } + FenlType::Error => { + // The original error is already reported. + Ok(None) + } + _ => { + if let Some(args) = base_type.collection_args(&Collection::List) { + let item_type = field_type(field, &args[0], base_location)?; + Ok(item_type + .map(|item_type| DataType::List(Arc::new(Field::new("item", item_type, true))))) + } else { + Err(DiagnosticCode::IllegalFieldRef.builder().with_label( + // If the base is not a struct, that is the "primary" problem. + base_location + .primary_label() + .with_message(format!("No fields for non-record base type {base_type}")), + )) + } + } + } +} diff --git a/crates/sparrow-compiler/src/ast_to_dfg/ast_dfg.rs b/crates/sparrow-compiler/src/ast_to_dfg/ast_dfg.rs index e4c95fd12..74334ea9b 100644 --- a/crates/sparrow-compiler/src/ast_to_dfg/ast_dfg.rs +++ b/crates/sparrow-compiler/src/ast_to_dfg/ast_dfg.rs @@ -1,8 +1,7 @@ -use std::cell::RefCell; -use std::rc::Rc; +use std::sync::{Arc, Mutex}; use egg::Id; -use sparrow_plan::GroupId; +use sparrow_instructions::GroupId; use sparrow_syntax::{FenlType, Location}; use crate::time_domain::TimeDomain; @@ -11,22 +10,22 @@ use crate::time_domain::TimeDomain; /// /// We may have multiple references to the same AstDfg node, so this allows us /// to clone shallow references rather than deeply copy. -pub type AstDfgRef = Rc<AstDfg>; +pub type AstDfgRef = Arc<AstDfg>; /// Various information produced for each AST node during the conversion to the /// DFG. -#[derive(Clone, Debug)] +#[derive(Debug)] pub struct AstDfg { /// Reference to the step containing the values of the AST node. /// /// Wrapped in a `RefCell` to allow mutability during /// pruning/simplification. - pub(crate) value: RefCell<Id>, + pub(crate) value: Mutex<Id>, /// Reference to the step containing the "is_new" bits of the AST node. /// /// Wrapped in a `RefCell` to allow mutability during /// pruning/simplification. - pub(crate) is_new: RefCell<Id>, + pub(crate) is_new: Mutex<Id>, /// Type of `value` produced. value_type: FenlType, /// Which entity grouping the node is associated with (if any). @@ -77,8 +76,8 @@ impl AstDfg { }; AstDfg { - value: RefCell::new(value), - is_new: RefCell::new(is_new), + value: Mutex::new(value), + is_new: Mutex::new(is_new), value_type, grouping, time_domain, @@ -87,12 +86,12 @@ impl AstDfg { } } - pub(crate) fn value(&self) -> Id { - *self.value.borrow() + pub fn value(&self) -> Id { + *self.value.lock().unwrap() } - pub(crate) fn is_new(&self) -> Id { - *self.is_new.borrow() + pub fn is_new(&self) -> Id { + *self.is_new.lock().unwrap() } pub fn value_type(&self) -> &FenlType { @@ -103,7 +102,7 @@ impl AstDfg { self.grouping } - pub(crate) fn time_domain(&self) -> &TimeDomain { + pub fn time_domain(&self) -> &TimeDomain { &self.time_domain } diff --git a/crates/sparrow-compiler/src/ast_to_dfg/record_ops_to_dfg.rs b/crates/sparrow-compiler/src/ast_to_dfg/record_ops_to_dfg.rs index c37c3eab7..026b5b480 100644 --- a/crates/sparrow-compiler/src/ast_to_dfg/record_ops_to_dfg.rs +++ b/crates/sparrow-compiler/src/ast_to_dfg/record_ops_to_dfg.rs @@ -1,4 +1,4 @@ -use std::rc::Rc; +use std::sync::Arc; use anyhow::Context; use arrow::datatypes::{DataType, Field, FieldRef}; @@ -6,7 +6,7 @@ use hashbrown::HashSet; use itertools::{izip, Itertools}; use smallvec::{smallvec, SmallVec}; use sparrow_arrow::scalar_value::ScalarValue; -use sparrow_plan::InstKind; +use sparrow_instructions::InstKind; use sparrow_syntax::{ArgVec, FenlType, Located, Location, Resolved}; use crate::ast_to_dfg::{is_any_new, missing_field_diagnostic, verify_same_partitioning}; @@ -137,7 +137,7 @@ pub(super) fn record_to_dfg( // Create the value after the fields since this takes ownership of the names.
let value = dfg.add_expression(Expression::Inst(InstKind::Record), instruction_args)?; - Ok(Rc::new(AstDfg::new( + Ok(Arc::new(AstDfg::new( value, is_new, value_type, @@ -250,7 +250,7 @@ pub(super) fn extend_record_to_dfg( TimeDomain::error() }); - Ok(Rc::new(AstDfg::new( + Ok(Arc::new(AstDfg::new( value, is_new, value_type, @@ -377,7 +377,7 @@ pub(super) fn select_remove_fields( let value = dfg.add_expression(Expression::Inst(InstKind::Record), record_args)?; let value_type = FenlType::Concrete(DataType::Struct(result_fields.into())); - Ok(Rc::new(AstDfg::new( + Ok(Arc::new(AstDfg::new( value, record.is_new(), value_type, diff --git a/crates/sparrow-compiler/src/ast_to_dfg/window_args.rs b/crates/sparrow-compiler/src/ast_to_dfg/window_args.rs index ddb5ed951..337b49c71 100644 --- a/crates/sparrow-compiler/src/ast_to_dfg/window_args.rs +++ b/crates/sparrow-compiler/src/ast_to_dfg/window_args.rs @@ -25,20 +25,20 @@ pub(crate) fn flatten_window_args( window.args().len() ); - // Since aggregations have a single active window - let duration_id = dfg.add_literal(LiteralValue::Number(String::from("1")).to_scalar()?)?; - let duration = Located::new( + // Since aggregations use a null duration + let null_arg = dfg.add_literal(LiteralValue::Null.to_scalar()?)?; + let null_arg = Located::new( add_literal( dfg, - duration_id, - FenlType::Concrete(DataType::Int64), - name.location().clone(), + null_arg, + FenlType::Concrete(DataType::Null), + window.location().clone(), )?, - name.location().clone(), + window.location().clone(), ); let condition = crate::ast_to_dfg(data_context, dfg, diagnostics, &window.args()[0])?; - Ok((window.with_value(condition), duration)) + Ok((window.with_value(condition), null_arg)) } else if name.inner() == "sliding" { debug_assert!( window.args().len() == 2, diff --git a/crates/sparrow-compiler/src/data_context.rs b/crates/sparrow-compiler/src/data_context.rs index 84b81b2a0..16f5316bf 100644 --- a/crates/sparrow-compiler/src/data_context.rs +++ b/crates/sparrow-compiler/src/data_context.rs @@ -1,5 +1,4 @@ use std::collections::BTreeMap; -use std::rc::Rc; use std::sync::Arc; use anyhow::Context; @@ -7,17 +6,19 @@ use arrow::datatypes::{DataType, SchemaRef}; use sparrow_api::kaskada::v1alpha::slice_plan::Slice; use sparrow_api::kaskada::v1alpha::{compute_table, ComputeTable, PreparedFile, TableConfig}; use sparrow_core::context_code; -use sparrow_plan::{GroupId, TableId}; +use sparrow_instructions::{GroupId, TableId}; +use sparrow_merge::InMemoryBatches; use sparrow_syntax::Location; use uuid::Uuid; -use crate::dfg::Operation; +use crate::dfg::{Dfg, Operation}; +use crate::AstDfgRef; /// Represents the "data context" for a compilation. /// /// Specifically, this holds the information about the tables /// available to the compilation. -#[derive(Default, Debug)] +#[derive(Default, Debug, Clone)] pub struct DataContext { /// Information about the groupings in the context. group_info: Vec, @@ -99,7 +100,7 @@ impl DataContext { } /// Add a table to the data context. 
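The reworked `add_table` just below registers through the map's entry API, which makes repeated registration of the same table id idempotent and hands back a mutable reference. A reduced sketch of the pattern, with hypothetical types:

```rust
use std::collections::BTreeMap;

struct TableInfo {
    name: String,
}

fn add_table(tables: &mut BTreeMap<u64, TableInfo>, id: u64, info: TableInfo) -> &mut TableInfo {
    // `or_insert` keeps the existing entry if `id` was already registered.
    tables.entry(id).or_insert(info)
}

fn main() {
    let mut tables = BTreeMap::new();
    add_table(&mut tables, 1, TableInfo { name: "Input".into() });
    let info = add_table(&mut tables, 1, TableInfo { name: "Other".into() });
    assert_eq!(info.name, "Input"); // the first registration wins
}
```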
- pub fn add_table(&mut self, table: ComputeTable) -> anyhow::Result { + pub fn add_table(&mut self, table: ComputeTable) -> anyhow::Result<&mut TableInfo> { let config = table .config .as_ref() @@ -164,8 +165,8 @@ impl DataContext { let table_uuid = Uuid::parse_str(&config.uuid).context("parsing string to table uuid")?; let table_id = TableId::new(table_uuid.to_owned()); let table_info = TableInfo::try_new(table_id, group_id, schema, table)?; - self.table_info.insert(table_id, table_info); - Ok(table_id) + let table_info = self.table_info.entry(table_id).or_insert(table_info); + Ok(table_info) } pub fn table_infos(&self) -> impl Iterator { @@ -173,44 +174,12 @@ impl DataContext { } /// Creates a DFG with nodes for the corresponding tables. - pub(crate) fn create_dfg(&self) -> anyhow::Result { - use smallvec::smallvec; - use sparrow_plan::InstOp; - use sparrow_syntax::FenlType; - - use crate::ast_to_dfg::AstDfg; - use crate::dfg::Dfg; - use crate::time_domain::TimeDomain; - + pub(crate) fn create_dfg(&self) -> anyhow::Result { let mut dfg = Dfg::default(); // Add the tables from the context to the DFG. - for (table_id, table_info) in self.table_info.iter() { - // Add the table reference to the environment. - let value = dfg.add_operation( - Operation::Scan { - table_id: *table_id, - slice: None, - }, - smallvec![], - )?; - let is_new = dfg.add_instruction(InstOp::IsValid, smallvec![value])?; - - let value_type = DataType::Struct(table_info.schema().fields().clone()); - let value_type = FenlType::Concrete(value_type); - - dfg.bind( - table_info.name(), - Rc::new(AstDfg::new( - value, - is_new, - value_type, - Some(table_info.group_id()), - TimeDomain::table(*table_id), - // TODO: Include a [FeatureSetPart] for internal nodes. - Location::internal_str("table_definition"), - None, - )), - ); + for table_info in self.table_info.values() { + let dfg_node = table_info.dfg_node(&mut dfg)?; + dfg.bind(table_info.name(), dfg_node); } Ok(dfg) } @@ -328,14 +297,14 @@ impl DataContext { } /// Information about groups. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct GroupInfo { name: String, key_type: DataType, } /// Information about tables. -#[derive(Clone, Debug)] +#[derive(Debug, Clone)] pub struct TableInfo { table_id: TableId, group_id: GroupId, @@ -346,6 +315,8 @@ pub struct TableInfo { /// Each file set corresponds to the files for the table with a specific /// slice configuration. file_sets: Vec, + /// An in-memory record batch for the contents of the table. + pub in_memory: Option>, } impl TableInfo { @@ -367,6 +338,7 @@ impl TableInfo { schema, config, file_sets, + in_memory: None, }) } @@ -432,6 +404,39 @@ impl TableInfo { }) .collect() } + + pub fn dfg_node(&self, dfg: &mut Dfg) -> anyhow::Result { + use smallvec::smallvec; + use sparrow_instructions::InstOp; + use sparrow_syntax::FenlType; + + use crate::ast_to_dfg::AstDfg; + use crate::time_domain::TimeDomain; + + // Add the table reference to the environment. + let value = dfg.add_operation( + Operation::Scan { + table_id: self.table_id, + slice: None, + }, + smallvec![], + )?; + let is_new = dfg.add_instruction(InstOp::IsValid, smallvec![value])?; + + let value_type = DataType::Struct(self.schema().fields().clone()); + let value_type = FenlType::Concrete(value_type); + + Ok(Arc::new(AstDfg::new( + value, + is_new, + value_type, + Some(self.group_id()), + TimeDomain::table(self.table_id), + // TODO: Include a [FeatureSetPart] for internal nodes. 
+ Location::internal_str("table_definition"), + None, + ))) + } } impl GroupInfo { diff --git a/crates/sparrow-compiler/src/dfg.rs b/crates/sparrow-compiler/src/dfg.rs index 83a00dc6d..45301d9c7 100644 --- a/crates/sparrow-compiler/src/dfg.rs +++ b/crates/sparrow-compiler/src/dfg.rs @@ -28,7 +28,7 @@ pub mod simplification; mod step_kind; mod useless_transforms; -use std::rc::Rc; +use std::sync::Arc; pub(crate) use analysis::*; use anyhow::Context; @@ -37,7 +37,7 @@ use hashbrown::HashMap; use itertools::{izip, Itertools}; pub(crate) use language::ChildrenVec; use sparrow_arrow::scalar_value::ScalarValue; -use sparrow_plan::{InstKind, InstOp}; +use sparrow_instructions::{InstKind, InstOp, Udf}; use sparrow_syntax::{FenlType, Location}; pub(crate) use step_kind::*; type DfgGraph = egg::EGraph; @@ -45,17 +45,18 @@ pub(super) use expr::DfgExpr; pub(crate) use pattern::*; use smallvec::smallvec; use tracing::{info, info_span}; -pub(crate) use useless_transforms::*; +pub use useless_transforms::*; use crate::ast_to_dfg::AstDfg; use crate::dfg::language::DfgLang; use crate::env::Env; +use crate::nearest_matches::NearestMatches; use crate::time_domain::TimeDomain; use crate::{AstDfgRef, CompilerOptions}; #[derive(Debug)] /// A wrapper around the DFG construction / manipulation functions. -pub(super) struct Dfg { +pub struct Dfg { /// The DFG being built/manipulated. graph: DfgGraph, /// A mapping from identifiers to corresponding DFG nodes. @@ -80,7 +81,7 @@ impl Default for Dfg { // Preemptively create a single error node, allowing for shallow // clones of the reference. let error_id = graph.add(DfgLang::new(StepKind::Error, smallvec![])); - let error_node = Rc::new(AstDfg::new( + let error_node = Arc::new(AstDfg::new( error_id, error_id, FenlType::Error, @@ -106,9 +107,8 @@ impl Default for Dfg { } impl Dfg { - pub(super) fn add_literal(&mut self, literal: impl Into) -> anyhow::Result { + pub fn add_literal(&mut self, literal: impl Into) -> anyhow::Result { let literal = literal.into(); - // TODO: FRAZ - do I need to support large string literal here? if let ScalarValue::Utf8(Some(literal)) = literal { self.add_string_literal(&literal) } else { @@ -125,6 +125,15 @@ impl Dfg { self.add_expression(Expression::Inst(InstKind::Simple(instruction)), children) } + /// Add a udf node to the DFG. + pub(super) fn add_udf( + &mut self, + udf: Arc, + children: ChildrenVec, + ) -> anyhow::Result { + self.add_expression(Expression::Inst(InstKind::Udf(udf)), children) + } + /// Add an expression to the DFG. pub(super) fn add_expression( &mut self, @@ -253,7 +262,7 @@ impl Dfg { ); } - // 2. The number of arguments should be correct. + // 2. The number of args should be correct. match expr { Expression::Literal(_) | Expression::LateBound(_) => { anyhow::ensure!( @@ -264,7 +273,10 @@ impl Dfg { ); } Expression::Inst(InstKind::Simple(op)) => op - .signature(sparrow_plan::Mode::Plan) + .signature() + .assert_valid_argument_count(children.len() - 1), + Expression::Inst(InstKind::Udf(udf)) => udf + .signature() .assert_valid_argument_count(children.len() - 1), Expression::Inst(InstKind::FieldRef) => { anyhow::ensure!( @@ -352,19 +364,19 @@ impl Dfg { /// /// # Error /// Returns an error containing the (up-to-5) nearest matches. 
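The `get_binding` error path below suggests near-miss names. A self-contained sketch of one way to rank candidates, using plain Levenshtein distance; the real `NearestMatches` helper may score differently:

```rust
/// Classic dynamic-programming edit distance.
fn levenshtein(a: &str, b: &str) -> usize {
    let b: Vec<char> = b.chars().collect();
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.chars().enumerate() {
        let mut cur = vec![i + 1];
        for (j, &cb) in b.iter().enumerate() {
            let cost = usize::from(ca != cb);
            cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
        }
        prev = cur;
    }
    prev[b.len()]
}

/// Keep the five closest known names, mirroring the "(up-to-5)" note above.
fn nearest<'a>(unknown: &str, known: impl Iterator<Item = &'a str>) -> Vec<&'a str> {
    let mut scored: Vec<_> = known.map(|k| (levenshtein(unknown, k), k)).collect();
    scored.sort();
    scored.into_iter().take(5).map(|(_, k)| k).collect()
}
```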
- pub(super) fn get_binding(&self, name: &str) -> Result> { + pub(super) fn get_binding(&self, name: &str) -> Result> { if let Some(found) = self.env.get(name) { Ok(found.clone()) } else { - Err(crate::nearest_matches::nearest_matches( + Err(crate::nearest_matches::NearestMatches::new_nearest_strs( name, - self.env.keys(), + self.env.keys().map(|s| s.as_str()), )) } } /// Runs simplifications on the graph. - pub(crate) fn run_simplifications(&mut self, options: &CompilerOptions) -> anyhow::Result<()> { + pub fn run_simplifications(&mut self, options: &CompilerOptions) -> anyhow::Result<()> { let span = info_span!("Running simplifications"); let _enter = span.enter(); @@ -375,7 +387,7 @@ impl Dfg { } /// Extract the simplest representation of the node `id` from the graph. - pub(crate) fn extract_simplest(&self, id: Id) -> DfgExpr { + pub fn extract_simplest(&self, id: Id) -> DfgExpr { let span = info_span!("Extracting simplest DFG"); let _enter = span.enter(); @@ -447,7 +459,7 @@ impl Dfg { /// Remove nodes that aren't needed for the `output` from the graph. /// /// Returns the new ID of the `output`. - pub(crate) fn prune(&mut self, output: Id) -> anyhow::Result { + pub fn prune(&mut self, output: Id) -> anyhow::Result { // The implementation is somewhat painful -- we extract a `RecExpr`, and then // recreate the EGraph. This has the desired property -- only referenced nodes // are extracted. But, it may cause the IDs to change. @@ -488,12 +500,12 @@ impl Dfg { }); self.env.foreach_value(|node| { let old_value = old_graph.find(node.value()); - node.value - .replace_with(|_| mapping.get(&old_value).copied().unwrap_or(new_error)); + let new_value = mapping.get(&old_value).copied().unwrap_or(new_error); + *node.value.lock().unwrap() = new_value; let old_is_new = old_graph.find(node.is_new()); - node.is_new - .replace_with(|_| mapping.get(&old_is_new).copied().unwrap_or(new_error)); + let new_is_new = mapping.get(&old_is_new).copied().unwrap_or(new_error); + *node.is_new.lock().unwrap() = new_is_new; }); self.graph = new_graph; Ok(new_output) @@ -504,6 +516,15 @@ impl Dfg { self.graph[id].data.literal_opt() } + /// Returns `Some(str)` if the ID is a string literal in the graph. + pub fn string_literal(&self, id: Id) -> Option<&str> { + self.literal(id).and_then(|s| match s { + ScalarValue::Utf8(s) => s.as_ref().map(|s| s.as_str()), + ScalarValue::LargeUtf8(s) => s.as_ref().map(|s| s.as_str()), + _ => None, + }) + } + /// Returns the ID of the operation node defining the domain of `id`. 
pub fn operation(&self, id: Id) -> Id { self.graph[id].data.operation(id) @@ -526,7 +547,7 @@ impl Dfg { } #[cfg(test)] - pub fn data(&self, id: Id) -> &DfgAnalysisData { + pub(crate) fn data(&self, id: Id) -> &DfgAnalysisData { &self.graph[id].data } } diff --git a/crates/sparrow-compiler/src/dfg/analysis.rs b/crates/sparrow-compiler/src/dfg/analysis.rs index 13eb7487b..3e223dff9 100644 --- a/crates/sparrow-compiler/src/dfg/analysis.rs +++ b/crates/sparrow-compiler/src/dfg/analysis.rs @@ -9,7 +9,7 @@ use hashbrown::HashMap; use itertools::Itertools; use smallvec::smallvec; use sparrow_arrow::scalar_value::ScalarValue; -use sparrow_plan::{InstKind, InstOp}; +use sparrow_instructions::{InstKind, InstOp}; use super::{DfgGraph, DfgLang, StepKind}; use crate::dfg::Expression; diff --git a/crates/sparrow-compiler/src/dfg/const_eval.rs b/crates/sparrow-compiler/src/dfg/const_eval.rs index 1fce5dc22..c57f84ab0 100644 --- a/crates/sparrow-compiler/src/dfg/const_eval.rs +++ b/crates/sparrow-compiler/src/dfg/const_eval.rs @@ -4,7 +4,7 @@ use sparrow_arrow::scalar_value::ScalarValue; use sparrow_instructions::{ ColumnarValue, ComputeStore, Evaluator, GroupingIndices, RuntimeInfo, StaticArg, StaticInfo, }; -use sparrow_plan::{InstKind, InstOp, ValueRef}; +use sparrow_instructions::{InstKind, InstOp, ValueRef}; use crate::types::instruction::typecheck_inst; @@ -34,7 +34,6 @@ pub(super) fn evaluate_constant( // now to fix various panics caused by not having *some* behavior defined. InstOp::CountIf => return Ok(ScalarValue::UInt32(Some(0))), InstOp::First => return Ok(inputs[0].null()), - InstOp::Lag => return Ok(inputs[0].null()), InstOp::Last => return Ok(inputs[0].null()), InstOp::Max => return Ok(inputs[0].null()), InstOp::Mean => return Ok(ScalarValue::Float64(None)), @@ -139,12 +138,7 @@ pub(super) fn evaluate_constant( let argument_types = args.iter().map(|i| i.data_type.clone().into()).collect(); let argument_literals: Vec<_> = inputs.into_iter().map(Some).collect(); - let result_type = typecheck_inst( - kind, - argument_types, - &argument_literals, - sparrow_plan::Mode::Dfg, - )?; + let result_type = typecheck_inst(kind, argument_types, &argument_literals)?; let result_type = result_type.arrow_type().with_context(|| { format!( "Expected result of literal instruction to have concrete type, but got {result_type:?}" @@ -205,7 +199,7 @@ impl RuntimeInfo for ConstEvaluator { #[cfg(test)] mod tests { use sparrow_arrow::scalar_value::ScalarValue; - use sparrow_plan::{InstKind, InstOp}; + use sparrow_instructions::{InstKind, InstOp}; use strum::IntoEnumIterator; #[test] @@ -220,13 +214,7 @@ mod tests { } let kind = InstKind::Simple(inst_op); - let inputs = vec![ - ScalarValue::Null; - inst_op - .signature(sparrow_plan::Mode::Dfg) - .parameters() - .len() - ]; + let inputs = vec![ScalarValue::Null; inst_op.signature().parameters().len()]; if let Err(e) = super::evaluate_constant(&kind, inputs) { println!("Failed to evaluate '{inst_op}': {e:?}"); diff --git a/crates/sparrow-compiler/src/dfg/dfg_to_dot.rs b/crates/sparrow-compiler/src/dfg/dfg_to_dot.rs index 5c7ee989f..54819792e 100644 --- a/crates/sparrow-compiler/src/dfg/dfg_to_dot.rs +++ b/crates/sparrow-compiler/src/dfg/dfg_to_dot.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use itertools::Itertools; -use sparrow_plan::InstKind; +use sparrow_instructions::InstKind; use super::StepKind; use crate::dfg::{DfgExpr, Expression}; @@ -90,5 +90,8 @@ fn node_label(kind: &StepKind) -> Cow<'static, str> { StepKind::Transform => Cow::Borrowed("transform"), 
StepKind::Error => Cow::Borrowed("error"), StepKind::Window(window) => Cow::Borrowed(window.label()), + StepKind::Expression(Expression::Inst(InstKind::Udf(udf))) => { + Cow::Owned(udf.signature().name().to_owned()) + } } } diff --git a/crates/sparrow-compiler/src/dfg/expr.rs b/crates/sparrow-compiler/src/dfg/expr.rs index 2735ef119..c408d6f50 100644 --- a/crates/sparrow-compiler/src/dfg/expr.rs +++ b/crates/sparrow-compiler/src/dfg/expr.rs @@ -1,10 +1,11 @@ //! An expression based representation of the DFG. use anyhow::Context; -use egg::{Id, Language, RecExpr}; +use egg::{AstSize, Extractor, Id, Language, RecExpr}; use super::DfgLang; -use crate::dfg::StepKind; +use crate::dfg::{simplification, DfgGraph, StepKind}; +use crate::CompilerOptions; /// The expression within the DFG. /// @@ -67,6 +68,25 @@ impl DfgExpr { &self.expr } + pub fn simplify(self, options: &CompilerOptions) -> anyhow::Result { + let _span = tracing::info_span!("Running simplificatons").entered(); + + let mut graph = DfgGraph::default(); + let id = graph.add_expr(&self.expr); + let graph = simplification::run_simplifications(graph, options)?; + + let extractor = Extractor::new(&graph, AstSize); + let (best_cost, best_expr) = extractor.find_best(id); + + tracing::info!( + "Extracted expression with cost {} and length {}", + best_cost, + best_expr.as_ref().len() + ); + + Ok(Self::new(best_expr)) + } + pub fn operation(&self, id: Id) -> Option { let node = &self.expr[id]; match node.kind() { diff --git a/crates/sparrow-compiler/src/dfg/language.rs b/crates/sparrow-compiler/src/dfg/language.rs index 6aefe27e6..c68fd8c47 100644 --- a/crates/sparrow-compiler/src/dfg/language.rs +++ b/crates/sparrow-compiler/src/dfg/language.rs @@ -78,7 +78,7 @@ mod tests { use egg::{EGraph, Pattern, Searcher, Var}; use smallvec::smallvec; use sparrow_arrow::scalar_value::ScalarValue; - use sparrow_plan::{InstKind, InstOp}; + use sparrow_instructions::{InstKind, InstOp}; use super::*; use crate::dfg::Expression; diff --git a/crates/sparrow-compiler/src/dfg/step_kind.rs b/crates/sparrow-compiler/src/dfg/step_kind.rs index 107d6a8f1..6bbfb42ad 100644 --- a/crates/sparrow-compiler/src/dfg/step_kind.rs +++ b/crates/sparrow-compiler/src/dfg/step_kind.rs @@ -7,7 +7,7 @@ use anyhow::Context; use sparrow_api::kaskada::v1alpha::operation_plan::tick_operation::TickBehavior; use sparrow_api::kaskada::v1alpha::{slice_plan, LateBoundValue}; use sparrow_arrow::scalar_value::ScalarValue; -use sparrow_plan::{InstKind, InstOp, TableId}; +use sparrow_instructions::{InstKind, InstOp, TableId}; use sparrow_syntax::WindowBehavior; use uuid::Uuid; diff --git a/crates/sparrow-compiler/src/dfg/useless_transforms.rs b/crates/sparrow-compiler/src/dfg/useless_transforms.rs index 62a3c9b0b..6adadf68d 100644 --- a/crates/sparrow-compiler/src/dfg/useless_transforms.rs +++ b/crates/sparrow-compiler/src/dfg/useless_transforms.rs @@ -13,7 +13,7 @@ use crate::dfg::{DfgExpr, StepKind}; /// guaranteed that all such nodes are removed. This pass ensures /// that any later analysis of the DFG does not have to deal with /// useless transforms. 
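`DfgExpr::simplify` above leans on egg's `Extractor` with the `AstSize` cost function. The same pattern on egg's stock `SymbolLang`, for readers unfamiliar with the library:

```rust
use egg::{rewrite as rw, AstSize, Extractor, RecExpr, Rewrite, Runner, SymbolLang};

fn main() {
    let rules: &[Rewrite<SymbolLang, ()>] = &[
        rw!("mul-one"; "(* ?a 1)" => "?a"),
        rw!("add-zero"; "(+ ?a 0)" => "?a"),
    ];
    // Saturate the e-graph, then extract the smallest equivalent expression.
    let expr: RecExpr<SymbolLang> = "(+ (* x 1) 0)".parse().unwrap();
    let runner = Runner::default().with_expr(&expr).run(rules);
    let extractor = Extractor::new(&runner.egraph, AstSize);
    let (best_cost, best) = extractor.find_best(runner.roots[0]);
    assert_eq!(best.to_string(), "x");
    assert_eq!(best_cost, 1);
}
```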
-pub(crate) fn remove_useless_transforms(expr: DfgExpr) -> anyhow::Result { +pub fn remove_useless_transforms(expr: DfgExpr) -> anyhow::Result { let mut rewritten = DfgExpr::with_capacity(expr.len()); let mut rewritten_ids = Vec::with_capacity(expr.len()); diff --git a/crates/sparrow-compiler/src/diagnostics/builder.rs b/crates/sparrow-compiler/src/diagnostics/builder.rs index 07fb06af8..7b3bfe408 100644 --- a/crates/sparrow-compiler/src/diagnostics/builder.rs +++ b/crates/sparrow-compiler/src/diagnostics/builder.rs @@ -7,7 +7,7 @@ use crate::diagnostics::collector::DiagnosticCollector; /// Builder for creating and emitting a diagnostic. #[must_use] #[derive(Debug, PartialEq)] -pub(crate) struct DiagnosticBuilder { +pub struct DiagnosticBuilder { code: DiagnosticCode, diagnostic: Diagnostic, } diff --git a/crates/sparrow-compiler/src/diagnostics/collector.rs b/crates/sparrow-compiler/src/diagnostics/collector.rs index 579e54b35..d72feed7c 100644 --- a/crates/sparrow-compiler/src/diagnostics/collector.rs +++ b/crates/sparrow-compiler/src/diagnostics/collector.rs @@ -10,7 +10,7 @@ use crate::diagnostics::DiagnosticCode; use crate::DiagnosticBuilder; /// Collects the diagnostic messages being reported. -pub(crate) struct DiagnosticCollector<'a> { +pub struct DiagnosticCollector<'a> { feature_set: FeatureSetParts<'a>, /// Collect the diagnostic messages. collected: Vec, @@ -29,7 +29,8 @@ impl<'a> std::fmt::Debug for DiagnosticCollector<'a> { #[derive(Clone, Debug)] pub struct CollectedDiagnostic { code: DiagnosticCode, - formatted: String, + pub formatted: String, + pub message: String, } impl CollectedDiagnostic { @@ -119,9 +120,11 @@ impl<'a> DiagnosticCollector<'a> { self.collected.push(CollectedDiagnostic { code: DiagnosticCode::FailedToReport, formatted: "Failed to report diagnostic".to_owned(), + message: "Failed to report diagnostic".to_owned(), }); return; }; + let message = diagnostic.message.clone(); let formatted = match String::from_utf8(buffer.into_inner()) { Ok(formatted) => formatted, Err(err) => { @@ -132,12 +135,17 @@ impl<'a> DiagnosticCollector<'a> { self.collected.push(CollectedDiagnostic { code: DiagnosticCode::FailedToReport, formatted: "Failed to report diagnostic".to_owned(), + message, }); return; } }; - let diagnostic = CollectedDiagnostic { code, formatted }; + let diagnostic = CollectedDiagnostic { + code, + formatted, + message, + }; match code.severity() { Severity::Bug | Severity::Error => { diff --git a/crates/sparrow-compiler/src/diagnostics/feature_set_parts.rs b/crates/sparrow-compiler/src/diagnostics/feature_set_parts.rs index a5065ebc6..9215bd8a0 100644 --- a/crates/sparrow-compiler/src/diagnostics/feature_set_parts.rs +++ b/crates/sparrow-compiler/src/diagnostics/feature_set_parts.rs @@ -33,6 +33,7 @@ pub enum PartName<'a> { Formula(&'a str), /// Return the part name for the query string. Query, + Builder, } impl<'a> std::fmt::Display for PartName<'a> { @@ -44,6 +45,7 @@ impl<'a> std::fmt::Display for PartName<'a> { PartName::Label(name) => write!(f, "'{name}'"), PartName::Formula(name) => write!(f, "'Formula: {name}'"), PartName::Query => write!(f, "Query"), + PartName::Builder => write!(f, "Builder"), } } } @@ -112,6 +114,7 @@ impl<'a> Files<'a> for FeatureSetParts<'a> { } } FeatureSetPart::Query => Ok(PartName::Query), + FeatureSetPart::Builder => Ok(PartName::Builder), } } @@ -127,6 +130,7 @@ impl<'a> Files<'a> for FeatureSetParts<'a> { .ok_or(Error::FileMissing)? 
.formula), FeatureSetPart::Query => Ok(&self.feature_set.query), + FeatureSetPart::Builder => Ok("builder"), } } @@ -140,6 +144,7 @@ impl<'a> Files<'a> for FeatureSetParts<'a> { .get(index as usize) .ok_or(Error::FileMissing)?, FeatureSetPart::Query => &self.query_line_starts, + FeatureSetPart::Builder => return Ok(0), }; Ok(line_starts @@ -163,6 +168,7 @@ impl<'a> Files<'a> for FeatureSetParts<'a> { .get(index as usize) .ok_or(Error::FileMissing)?, FeatureSetPart::Query => &self.query_line_starts, + FeatureSetPart::Builder => return Ok(0.."builder".len()), }; self.line_range_helper(id, line_starts, line_index) diff --git a/crates/sparrow-compiler/src/frontend.rs b/crates/sparrow-compiler/src/frontend.rs index 9089f8302..6f746e4b6 100644 --- a/crates/sparrow-compiler/src/frontend.rs +++ b/crates/sparrow-compiler/src/frontend.rs @@ -10,14 +10,14 @@ pub(crate) mod resolve_arguments; mod slice_analysis; use std::collections::BTreeSet; -use std::rc::Rc; +use std::sync::Arc; use anyhow::anyhow; use arrow::datatypes::{DataType, TimeUnit}; use smallvec::smallvec; use sparrow_api::kaskada::v1alpha::compile_request::ExpressionKind; use sparrow_api::kaskada::v1alpha::{FeatureSet, LateBoundValue, PerEntityBehavior, SlicePlan}; -use sparrow_plan::GroupId; +use sparrow_instructions::GroupId; use sparrow_syntax::{FeatureSetPart, FenlType, Location}; use tracing::error; @@ -352,7 +352,7 @@ fn create_changed_since_time_node(dfg: &mut Dfg) -> anyhow::Result { )?; let value_type = FenlType::Concrete(DataType::Timestamp(TimeUnit::Nanosecond, None)); let is_new = dfg.add_literal(false)?; - Ok(Rc::new(AstDfg::new( + Ok(Arc::new(AstDfg::new( value, is_new, value_type, @@ -375,7 +375,7 @@ fn create_final_at_time_time_node(dfg: &mut Dfg) -> anyhow::Result { )?; let value_type = FenlType::Concrete(DataType::Timestamp(TimeUnit::Nanosecond, None)); let is_new = dfg.add_literal(false)?; - Ok(Rc::new(AstDfg::new( + Ok(Arc::new(AstDfg::new( value, is_new, value_type, diff --git a/crates/sparrow-compiler/src/frontend/incremental_enabled.rs b/crates/sparrow-compiler/src/frontend/incremental_enabled.rs index 0cab995b4..d449c2748 100644 --- a/crates/sparrow-compiler/src/frontend/incremental_enabled.rs +++ b/crates/sparrow-compiler/src/frontend/incremental_enabled.rs @@ -1,5 +1,5 @@ use sparrow_api::kaskada::v1alpha::PerEntityBehavior; -use sparrow_plan::{InstKind, InstOp}; +use sparrow_instructions::{InstKind, InstOp}; use crate::dfg::{DfgExpr, Expression, Operation, StepKind}; use crate::CompilerOptions; @@ -27,7 +27,7 @@ pub(super) fn is_incremental_enabled(dfg: &DfgExpr, options: &CompilerOptions) - fn is_incremental_supported(step: &StepKind) -> bool { match step { StepKind::Operation(Operation::Empty | Operation::MergeJoin) => true, - StepKind::Expression(Expression::Inst(InstKind::Simple(InstOp::Lag))) => true, + StepKind::Expression(Expression::Inst(InstKind::Simple(InstOp::Collect))) => true, StepKind::Operation(Operation::Scan { .. 
} | Operation::Select | Operation::Tick(_)) => { true } diff --git a/crates/sparrow-compiler/src/frontend/parse_expr.rs b/crates/sparrow-compiler/src/frontend/parse_expr.rs index bb4466c91..ae100f1c4 100644 --- a/crates/sparrow-compiler/src/frontend/parse_expr.rs +++ b/crates/sparrow-compiler/src/frontend/parse_expr.rs @@ -27,7 +27,7 @@ fn parse_error_to_diagnostic( ParseError::InvalidToken { location } => DiagnosticCode::SyntaxError .builder() .with_label(Label::primary(part_id, location..location).with_message("Invalid token")), - ParseError::UnrecognizedEOF { location, expected } => { + ParseError::UnrecognizedEof { location, expected } => { let diagnostic = DiagnosticCode::SyntaxError.builder().with_label( Label::primary(part_id, location..location).with_message("Unexpected EOF"), ); diff --git a/crates/sparrow-compiler/src/frontend/resolve_arguments.rs b/crates/sparrow-compiler/src/frontend/resolve_arguments.rs index f42b422b8..94cb8d2bd 100644 --- a/crates/sparrow-compiler/src/frontend/resolve_arguments.rs +++ b/crates/sparrow-compiler/src/frontend/resolve_arguments.rs @@ -1,6 +1,5 @@ use std::borrow::Cow; -use itertools::Itertools; use sparrow_syntax::{ Arguments, Expr, ExprOp, ExprRef, Located, ResolveError, Resolved, ResolvedExpr, }; @@ -9,7 +8,7 @@ use static_init::dynamic; use crate::{DiagnosticBuilder, DiagnosticCode}; #[dynamic] -static FIELD_REF_ARGUMENTS: [Located<String>; 1] = [Located::internal_string("record")]; +pub(crate) static FIELD_REF_ARGUMENTS: [Located<String>; 1] = [Located::internal_string("record")]; #[dynamic] static PIPE_ARGUMENTS: [Located<String>; 2] = [ @@ -106,12 +105,7 @@ fn resolve_arguments( .primary_label() .with_message(format!("No function named '{function_name}'")), ) - .with_note(format!( - "Nearest matches: {}", - candidates - .iter() - .format_with(", ", |e, f| f(&format_args!("'{e}'"))) - )); + .with_note(format!("Nearest matches: {candidates}")); return Err(Some(diagnostic)); } }, @@ -223,15 +217,15 @@ fn resolve_arguments( )), )), ResolveError::InvalidKeywordArgument { keyword } => { - let nearest = crate::nearest_matches::nearest_matches( + let nearest = crate::nearest_matches::NearestMatches::new_nearest_strs( keyword.inner(), - names.iter().map(Located::inner), + names.iter().map(|s| s.inner().as_str()), ); Some( DiagnosticCode::InvalidArguments .builder() .with_label(operator_location.primary_label()) - .with_note(format!("Nearest matches: {}", nearest.iter().format(", "))), + .with_note(format!("Nearest matches: {nearest}")), ) } }) @@ -322,9 +316,9 @@ mod tests { _ => panic!("expected call"), }; - // Sum should have three args: [input, tick, duration]. + // Sum should have two args: [input, window]. let sum_arg = args[1].inner().args(); - assert_eq!(sum_arg.len(), 3); + assert_eq!(sum_arg.len(), 2); let input = sum_arg[0].inner(); match input.op() { ExprOp::Reference(str) => assert_eq!(str.inner(), "$input"), diff --git a/crates/sparrow-compiler/src/functions.rs b/crates/sparrow-compiler/src/functions.rs index eff086b97..5aa988431 100644 --- a/crates/sparrow-compiler/src/functions.rs +++ b/crates/sparrow-compiler/src/functions.rs @@ -20,6 +20,7 @@ pub use function::*; use implementation::*; pub(crate) use pushdown::*; pub use registry::*; +pub use time_domain_check::*; /// Register all the functions available in the registry.
fn register_functions(registry: &mut Registry) { diff --git a/crates/sparrow-compiler/src/functions/aggregation.rs b/crates/sparrow-compiler/src/functions/aggregation.rs index dbd5c80f0..deca779dc 100644 --- a/crates/sparrow-compiler/src/functions/aggregation.rs +++ b/crates/sparrow-compiler/src/functions/aggregation.rs @@ -10,7 +10,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("count_if(input: T, window: window = null) -> u32") .with_dfg_signature( - "count_if(input: T, window: window = null, duration: i64 = null) -> u32", + "count_if(input: T, window: bool = null, duration: i64 = null) -> u32", ) .with_implementation(Implementation::new_pattern(&format!( "(count_if ({}) ({}) ({}))", @@ -24,7 +24,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("count(input: T, window: window = null) -> u32") .with_dfg_signature( - "count_if(input: T, window: window = null, duration: i64 = null) -> u32", + "count_if(input: T, window: bool = null, duration: i64 = null) -> u32", ) .with_implementation(Implementation::new_pattern(&format!( "(count_if ({}) ({}) ({}))", @@ -39,7 +39,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("sum(input: N, window: window = null) -> N ") .with_dfg_signature( - "sum(input: N, window: window = null, duration: i64 = null) -> N", + "sum(input: N, window: bool = null, duration: i64 = null) -> N", ) .with_implementation(Implementation::new_pattern(&format!( "(sum ({}) ({}) ({}))", @@ -53,7 +53,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("min(input: O, window: window = null) -> O") .with_dfg_signature( - "min(input: O, window: window = null, duration: i64 = null) -> O", + "min(input: O, window: bool = null, duration: i64 = null) -> O", ) .with_implementation(Implementation::new_pattern(&format!( "(min ({}) ({}) ({}))", @@ -67,7 +67,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("max(input: O, window: window = null) -> O") .with_dfg_signature( - "max(input: O, window: window = null, duration: i64 = null) -> O", + "max(input: O, window: bool = null, duration: i64 = null) -> O", ) .with_implementation(Implementation::new_pattern(&format!( "(max ({}) ({}) ({}))", @@ -81,7 +81,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("mean(input: N, window: window = null) -> f64") .with_dfg_signature( - "mean(input: N, window: window = null, duration: i64 = null) -> f64", + "mean(input: N, window: bool = null, duration: i64 = null) -> f64", ) .with_implementation(Implementation::new_pattern(&format!( "(mean ({}) ({}) ({}))", @@ -95,7 +95,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("variance(input: N, window: window = null) -> f64") .with_dfg_signature( - "variance(input: N, window: window = null, duration: i64 = null) -> f64", + "variance(input: N, window: bool = null, duration: i64 = null) -> f64", ) .with_implementation(Implementation::new_pattern(&format!( "(variance ({}) ({}) ({}))", @@ -109,7 +109,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("stddev(input: N, window: window = null) -> f64") .with_dfg_signature( - "stddev(input: N, window: window = null, duration: i64 = null) -> f64", + "stddev(input: N, window: bool = null, duration: i64 = null) -> f64", ) .with_implementation(Implementation::new_pattern(&format!( "(powf (variance ({}) ({}) ({})) 0.5f64)", @@ -122,7 +122,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("last(input: T, window: 
window = null) -> T") .with_dfg_signature( - "last(input: T, window: window = null, duration: i64 = null) -> T", + "last(input: T, window: bool = null, duration: i64 = null) -> T", ) .with_implementation(Implementation::Pushdown(Box::new( Pushdown::try_new( @@ -164,7 +164,7 @@ pub(super) fn register(registry: &mut Registry) { registry .register("first(input: T, window: window = null) -> T") .with_dfg_signature( - "first(input: T, window: window = null, duration: i64 = null) -> T", + "first(input: T, window: bool = null, duration: i64 = null) -> T", ) .with_implementation(Implementation::Pushdown(Box::new( Pushdown::try_new( diff --git a/crates/sparrow-compiler/src/functions/collection.rs b/crates/sparrow-compiler/src/functions/collection.rs index 2b5d29b4c..bfd8d0e72 100644 --- a/crates/sparrow-compiler/src/functions/collection.rs +++ b/crates/sparrow-compiler/src/functions/collection.rs @@ -1,10 +1,43 @@ -use sparrow_plan::InstOp; +use sparrow_instructions::InstOp; use crate::functions::{Implementation, Registry}; +use super::time_domain_check::TimeDomainCheck; + pub(super) fn register(registry: &mut Registry) { registry .register("get(key: K, map: map) -> V") .with_implementation(Implementation::Instruction(InstOp::Get)) .set_internal(); + + registry + .register("index(i: i64, list: list) -> T") + .with_implementation(Implementation::Instruction(InstOp::Index)) + .set_internal(); + + registry + .register("collect(input: T, const max: i64, const min: i64 = 0, window: window = null) -> list") + .with_dfg_signature( + "collect(input: T, const max: i64, const min: i64 = 0, window: bool = null, duration: i64 = null) -> list", + ) + .with_implementation(Implementation::Instruction(InstOp::Collect)) + // This makes `collect` a continuous function. Though, it's not perhaps defined + // as an aggregation, so we may want to rename or create a new category for it. 
+ .with_time_domain_check(TimeDomainCheck::Aggregation) + .set_internal(); + + registry + .register("list_len(input: list<T>) -> i32") + .with_implementation(Implementation::Instruction(InstOp::ListLen)) + .set_internal(); + + registry + .register("flatten(input: list<list<T>>) -> list<T>") + .with_implementation(Implementation::Instruction(InstOp::Flatten)) + .set_internal(); + + registry + .register("union(a: list<T>, b: list<T>) -> list<T>") + .with_implementation(Implementation::Instruction(InstOp::Union)) + .set_internal(); } diff --git a/crates/sparrow-compiler/src/functions/comparison.rs b/crates/sparrow-compiler/src/functions/comparison.rs index 4a3afaca9..7b4f2d852 100644 --- a/crates/sparrow-compiler/src/functions/comparison.rs +++ b/crates/sparrow-compiler/src/functions/comparison.rs @@ -1,4 +1,4 @@ -use sparrow_plan::InstOp; +use sparrow_instructions::InstOp; use crate::functions::{Implementation, Registry}; diff --git a/crates/sparrow-compiler/src/functions/function.rs b/crates/sparrow-compiler/src/functions/function.rs index baa156ea1..5ce389f87 100644 --- a/crates/sparrow-compiler/src/functions/function.rs +++ b/crates/sparrow-compiler/src/functions/function.rs @@ -1,10 +1,10 @@ -use std::rc::Rc; use std::str::FromStr; +use std::sync::Arc; use egg::{Subst, Var}; use itertools::{izip, Itertools}; use sparrow_api::kaskada::v1alpha::operation_plan::tick_operation::TickBehavior; -use sparrow_plan::GroupId; +use sparrow_instructions::GroupId; use sparrow_syntax::{FeatureSetPart, FenlType, Located, Location, Signature}; use crate::ast_to_dfg::AstDfg; @@ -118,18 +118,13 @@ impl Function { self.internal } - /// Returns the internal signature, if one exists. Else, returns the - /// user-facing signature. - /// - /// This is used for certain functions like aggregations, where the - /// arguments are flattened in the DFG, requiring us to check parameters - /// against an internal signature representing the flattened arguments. - pub(crate) fn signature(&self) -> &Signature { - if let Some(signature) = &self.internal_signature { - signature - } else { - &self.signature - } + /// Returns the user-facing signature.
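+ /// + /// For example (taken from the aggregation registry above): users see + /// `sum(input: N, window: window = null) -> N`, while `internal_signature` + /// returns the flattened DFG form + /// `sum(input: N, window: bool = null, duration: i64 = null) -> N`.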
+ pub fn signature(&self) -> &Signature { + &self.signature + } + + pub fn internal_signature(&self) -> &Signature { + self.internal_signature.as_ref().unwrap_or(&self.signature) } pub fn signature_str(&self) -> &'static str { @@ -224,7 +219,7 @@ impl Function { self.time_domain_check .check_args(location, diagnostics, args, data_context)?; - Ok(Rc::new(AstDfg::new( + Ok(Arc::new(AstDfg::new( value, is_new, value_type, diff --git a/crates/sparrow-compiler/src/functions/general.rs b/crates/sparrow-compiler/src/functions/general.rs index 3517058a5..ed66ce41f 100644 --- a/crates/sparrow-compiler/src/functions/general.rs +++ b/crates/sparrow-compiler/src/functions/general.rs @@ -1,4 +1,4 @@ -use sparrow_plan::InstOp; +use sparrow_instructions::InstOp; use crate::functions::{Implementation, Registry}; diff --git a/crates/sparrow-compiler/src/functions/implementation.rs b/crates/sparrow-compiler/src/functions/implementation.rs index 9e7b16367..012682fc7 100644 --- a/crates/sparrow-compiler/src/functions/implementation.rs +++ b/crates/sparrow-compiler/src/functions/implementation.rs @@ -6,7 +6,7 @@ use itertools::izip; use once_cell::sync::OnceCell; use smallvec::smallvec; use sparrow_api::kaskada::v1alpha::operation_plan::tick_operation::TickBehavior; -use sparrow_plan::InstOp; +use sparrow_instructions::InstOp; use sparrow_syntax::{Expr, FeatureSetPart, FenlType, Located, ResolvedExpr, WindowBehavior}; use crate::ast_to_dfg::ast_to_dfg; diff --git a/crates/sparrow-compiler/src/functions/json.rs b/crates/sparrow-compiler/src/functions/json.rs index e4388447f..42fde3b59 100644 --- a/crates/sparrow-compiler/src/functions/json.rs +++ b/crates/sparrow-compiler/src/functions/json.rs @@ -1,4 +1,4 @@ -use sparrow_plan::InstOp; +use sparrow_instructions::InstOp; use super::implementation::Implementation; use crate::functions::Registry; diff --git a/crates/sparrow-compiler/src/functions/logical.rs b/crates/sparrow-compiler/src/functions/logical.rs index a99667a22..19929342b 100644 --- a/crates/sparrow-compiler/src/functions/logical.rs +++ b/crates/sparrow-compiler/src/functions/logical.rs @@ -1,4 +1,4 @@ -use sparrow_plan::InstOp; +use sparrow_instructions::InstOp; use crate::functions::{Implementation, Registry}; diff --git a/crates/sparrow-compiler/src/functions/math.rs b/crates/sparrow-compiler/src/functions/math.rs index c1bdf80b4..6fd2186b2 100644 --- a/crates/sparrow-compiler/src/functions/math.rs +++ b/crates/sparrow-compiler/src/functions/math.rs @@ -1,4 +1,4 @@ -use sparrow_plan::InstOp; +use sparrow_instructions::InstOp; use crate::functions::{Implementation, Registry}; diff --git a/crates/sparrow-compiler/src/functions/pushdown.rs b/crates/sparrow-compiler/src/functions/pushdown.rs index 7d9f1e4e6..09b23fd24 100644 --- a/crates/sparrow-compiler/src/functions/pushdown.rs +++ b/crates/sparrow-compiler/src/functions/pushdown.rs @@ -4,7 +4,7 @@ use anyhow::{anyhow, Context}; use arrow::datatypes::{DataType, FieldRef}; use egg::{Id, Subst, Var}; use smallvec::smallvec; -use sparrow_plan::InstKind; +use sparrow_instructions::InstKind; use crate::dfg::{ChildrenVec, Dfg, DfgPattern, Expression}; @@ -111,6 +111,7 @@ impl Pushdown { | DataType::Interval(_) | DataType::Utf8 | DataType::LargeUtf8 + | DataType::List(..) | DataType::Map(..) 
=> { let mut subst = subst.clone(); subst.insert( diff --git a/crates/sparrow-compiler/src/functions/registry.rs b/crates/sparrow-compiler/src/functions/registry.rs index 78b9794c1..52028f853 100644 --- a/crates/sparrow-compiler/src/functions/registry.rs +++ b/crates/sparrow-compiler/src/functions/registry.rs @@ -3,6 +3,8 @@ use hashbrown::HashMap; use sparrow_syntax::{FeatureSetPart, Signature}; use static_init::dynamic; +use crate::nearest_matches::NearestMatches; + use super::{Function, FunctionBuilder}; pub(super) struct Registry { @@ -56,9 +58,9 @@ impl Registry { /// /// # Errors /// Returns the 5 closest matches from the registry. -pub fn get_function(name: &str) -> Result<&'static Function, Vec<&'static str>> { +pub fn get_function(name: &str) -> Result<&'static Function, NearestMatches<&'static str>> { REGISTRY.get_by_name(name).ok_or_else(|| { - crate::nearest_matches::nearest_matches( + crate::nearest_matches::NearestMatches::new_nearest_strs( name, REGISTRY .iter() diff --git a/crates/sparrow-compiler/src/functions/string.rs b/crates/sparrow-compiler/src/functions/string.rs index 4188228ea..bdd06031b 100644 --- a/crates/sparrow-compiler/src/functions/string.rs +++ b/crates/sparrow-compiler/src/functions/string.rs @@ -1,4 +1,4 @@ -use sparrow_plan::InstOp; +use sparrow_instructions::InstOp; use crate::functions::{Implementation, Registry}; diff --git a/crates/sparrow-compiler/src/functions/time.rs b/crates/sparrow-compiler/src/functions/time.rs index b8975904f..6c3552723 100644 --- a/crates/sparrow-compiler/src/functions/time.rs +++ b/crates/sparrow-compiler/src/functions/time.rs @@ -1,4 +1,4 @@ -use sparrow_plan::InstOp; +use sparrow_instructions::InstOp; use crate::functions::time_domain_check::TimeDomainCheck; use crate::functions::{Implementation, Registry}; @@ -133,5 +133,7 @@ pub(super) fn register(registry: &mut Registry) { // Note: Lag is specifically *not* an aggregation function. registry .register("lag(const n: i64, input: O) -> O") - .with_implementation(Implementation::Instruction(InstOp::Lag)); + .with_implementation(Implementation::new_fenl_rewrite( + "input | collect(min=n+1, max=n+1) | index(0)", + )); } diff --git a/crates/sparrow-compiler/src/functions/time_domain_check.rs b/crates/sparrow-compiler/src/functions/time_domain_check.rs index 4967ef701..7748fde50 100644 --- a/crates/sparrow-compiler/src/functions/time_domain_check.rs +++ b/crates/sparrow-compiler/src/functions/time_domain_check.rs @@ -12,7 +12,7 @@ use crate::{AstDfgRef, DataContext, DiagnosticCollector}; /// are subject to change. It may be better to use a closure to allow defining /// the special behaviors as part of each function. #[derive(Default)] -pub(super) enum TimeDomainCheck { +pub enum TimeDomainCheck { /// The function requires the arguments to be compatible, and returns /// the resulting time domain. 
/// @@ -47,7 +47,7 @@ pub(super) enum TimeDomainCheck { } impl TimeDomainCheck { - pub(super) fn check_args( + pub fn check_args( &self, location: &Location, diagnostics: &mut DiagnosticCollector<'_>, diff --git a/crates/sparrow-compiler/src/lib.rs b/crates/sparrow-compiler/src/lib.rs index 6631f2e85..f73670e80 100644 --- a/crates/sparrow-compiler/src/lib.rs +++ b/crates/sparrow-compiler/src/lib.rs @@ -41,7 +41,7 @@ mod frontend; mod functions; mod nearest_matches; mod options; -mod plan; +pub mod plan; mod time_domain; mod types; @@ -49,8 +49,11 @@ mod types; pub use ast_to_dfg::*; pub use compile::*; pub use data_context::*; +pub use dfg::{remove_useless_transforms, Dfg}; pub use diagnostics::*; pub use error::*; pub use frontend::*; pub use functions::*; pub use options::*; + +pub use nearest_matches::NearestMatches; diff --git a/crates/sparrow-compiler/src/nearest_matches.rs b/crates/sparrow-compiler/src/nearest_matches.rs index 51253e044..08b834935 100644 --- a/crates/sparrow-compiler/src/nearest_matches.rs +++ b/crates/sparrow-compiler/src/nearest_matches.rs @@ -1,14 +1,89 @@ use edit_distance::edit_distance; use itertools::Itertools; -/// Return a vector containing the up-to-5 nearest matches. -pub(crate) fn nearest_matches<T: AsRef<str> + Ord>( - query: &str, - items: impl Iterator<Item = T>, -) -> Vec<T> { - items - .map(|item| (edit_distance(query, item.as_ref()), item)) - .k_smallest(5) - .map(|(_, item)| item) - .collect +/// The nearest matches to a given name. +#[derive(Debug, PartialEq, Clone)] +pub struct NearestMatches<T>(Vec<T>); + +impl<T: std::fmt::Debug + std::fmt::Display + Send + Sync + 'static> error_stack::Context + for NearestMatches<T> +{ +} + +impl<T> Default for NearestMatches<T> { + fn default() -> Self { + Self(vec![]) + } +} + +impl<T> NearestMatches<T> { + pub fn map<T2>(self, f: impl Fn(T) -> T2) -> NearestMatches<T2> { + NearestMatches(self.0.into_iter().map(f).collect()) + } +} + +impl<'a> NearestMatches<&'a str> { + /// Create a set of nearest matches for a given string. + pub fn new_nearest_strs(query: &str, items: impl Iterator<Item = &'a str> + 'a) -> Self { + let nearest_matches: Vec<_> = items + .map(|item| (edit_distance(query, item), item)) + .k_smallest(5) + .map(|(_, item)| item) + .collect(); + Self(nearest_matches) + } +} + +impl<T: AsRef<str> + Ord> NearestMatches<T> { + /// Create a set of nearest matches for a given string.
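+ /// + /// Keeps the (up to) five candidates with the smallest edit distance to + /// `query`; e.g. (illustrative) querying `"lenght"` against the function + /// registry would surface `'length'` among the nearest matches.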
+ pub fn new_nearest_strings(query: &str, items: impl Iterator<Item = T>) -> Self { + let nearest_matches: Vec<_> = items + .map(|item| (edit_distance(query, item.as_ref()), item)) + .k_smallest(5) + .map(|(_, item)| item) + .collect(); + Self(nearest_matches) + } +} + +impl<T: std::fmt::Display> std::fmt::Display for NearestMatches<T> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if self.0.is_empty() { + write!(f, "none") + } else { + self.0 + .iter() + .format_with(", ", |e, f| f(&format_args!("'{e}'"))) + .fmt(f) + } + } +} + +impl<T> From<Vec<T>> for NearestMatches<T> { + fn from(matches: Vec<T>) -> Self { + Self(matches) + } +} + +impl<T> NearestMatches<T> { + pub fn inner(self) -> Vec<T> { + self.0 + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_nearest_matches_display() { + insta::assert_display_snapshot!(NearestMatches::<&'static str>::from(vec![]), @"none"); + insta::assert_display_snapshot!(NearestMatches::from(vec!["foo"]), @"'foo'"); + insta::assert_display_snapshot!(NearestMatches::from(vec!["foo", "bar"]), @"'foo', 'bar'"); + insta::assert_display_snapshot!(NearestMatches::from(vec!["foo", "bar", "baz"]), @"'foo', 'bar', 'baz'"); + } } diff --git a/crates/sparrow-compiler/src/plan.rs b/crates/sparrow-compiler/src/plan.rs index 28dab43bb..e991094e5 100644 --- a/crates/sparrow-compiler/src/plan.rs +++ b/crates/sparrow-compiler/src/plan.rs @@ -24,7 +24,7 @@ const DBG_PRINT_PLAN: bool = false; /// TODO: The `DataContext` is used to get the table name from an ID, which is /// only necessary to create the `slice_plan` because it uses a name instead of /// an ID. -pub(super) fn extract_plan_proto( +pub fn extract_plan_proto( data_context: &DataContext, expr: DfgExpr, per_entity_behavior: PerEntityBehavior, diff --git a/crates/sparrow-compiler/src/plan/expression_to_plan.rs b/crates/sparrow-compiler/src/plan/expression_to_plan.rs index b9bc0df53..d767dcb90 100644 --- a/crates/sparrow-compiler/src/plan/expression_to_plan.rs +++ b/crates/sparrow-compiler/src/plan/expression_to_plan.rs @@ -2,7 +2,7 @@ use anyhow::Context; use sparrow_api::kaskada::v1alpha::DataType; use sparrow_api::kaskada::v1alpha::{expression_plan, ExpressionPlan}; use sparrow_arrow::scalar_value::ScalarValue; -use sparrow_plan::{InstKind, Mode}; +use sparrow_instructions::InstKind; use sparrow_syntax::{ArgVec, FenlType}; use crate::dfg::Expression; @@ -35,6 +35,7 @@ pub(super) fn dfg_to_plan( InstKind::FieldRef => "field_ref".to_owned(), InstKind::Record => "record".to_owned(), InstKind::Cast(_) => "cast".to_owned(), + InstKind::Udf(udf) => udf.signature().name().to_owned(), }; let result_type = @@ -97,11 +98,7 @@ fn infer_result_type( } } - let result_type = crate::types::instruction::typecheck_inst( - inst_kind, - argument_types, - &argument_literals, - Mode::Plan, - )?; + let result_type = + crate::types::instruction::typecheck_inst(inst_kind, argument_types, &argument_literals)?; DataType::try_from(&result_type).context("unable to encode result type") } diff --git a/crates/sparrow-compiler/src/plan/interpolations.rs b/crates/sparrow-compiler/src/plan/interpolations.rs index 3bc98b893..73c5915bf 100644 --- a/crates/sparrow-compiler/src/plan/interpolations.rs +++ b/crates/sparrow-compiler/src/plan/interpolations.rs @@ -1,5 +1,5 @@ use sparrow_api::kaskada::v1alpha::operation_input_ref::Interpolation; -use sparrow_plan::{InstKind, InstOp}; +use sparrow_instructions::{InstKind, InstOp}; use crate::dfg::{DfgExpr, Expression, Operation, StepKind}; @@ -50,8 +50,10
@@ impl Interpolations { // always be able to be present in subsequent instructions. Interpolation::AsOf } + // TODO: `collect` should be in its own special grouping, + // or we should just start calling it an aggregation everywhere. StepKind::Expression(Expression::Inst(InstKind::Simple(inst))) - if inst.is_aggregation() => + if inst.is_aggregation() || inst.name() == "collect" => { Interpolation::AsOf } diff --git a/crates/sparrow-compiler/src/plan/operation_to_plan.rs b/crates/sparrow-compiler/src/plan/operation_to_plan.rs index 0d5de71de..6ba346a24 100644 --- a/crates/sparrow-compiler/src/plan/operation_to_plan.rs +++ b/crates/sparrow-compiler/src/plan/operation_to_plan.rs @@ -21,10 +21,16 @@ use crate::DataContext; /// DataType protobuf representing a list of u64. #[static_init::dynamic] static LIST_U64_DATA_TYPE: DataType = DataType { - kind: Some(data_type::Kind::List(Box::new(DataType { - kind: Some(data_type::Kind::Primitive( - data_type::PrimitiveType::U64 as i32, - )), + kind: Some(data_type::Kind::List(Box::new(data_type::List { + // Note: The fields here must match the default fields used when creating + // types during type inference, otherwise schema validation will fail. + name: "item".to_owned(), + item_type: Some(Box::new(DataType { + kind: Some(data_type::Kind::Primitive( + data_type::PrimitiveType::U64 as i32, + )), + })), + nullable: true, }))), }; diff --git a/crates/sparrow-compiler/src/table_content.rs b/crates/sparrow-compiler/src/table_content.rs new file mode 100644 index 000000000..236ad2b71 --- /dev/null +++ b/crates/sparrow-compiler/src/table_content.rs @@ -0,0 +1,65 @@ +use arrow::datatypes::SchemaRef; +use arrow::record_batch::RecordBatch; +use error_stack::{IntoReport, ResultExt}; +// NOTE: assumed import paths -- `futures`' BoxStream (a stream of plain +// batches) rather than tonic's `Result`-wrapping alias, and the +// `homogeneous_merge` kernel's actual location; adjust as needed. +use futures::stream::BoxStream; +use sparrow_arrow::utils::homogeneous_merge; + +use crate::Error; + +pub struct TableContent { + schema: SchemaRef, + version: usize, + merged: RecordBatch, + updates: tokio::sync::broadcast::Sender<(usize, RecordBatch)>, +} + +impl TableContent { + pub fn new(schema: SchemaRef) -> Self { + let (updates, _) = tokio::sync::broadcast::channel(1); + let merged = RecordBatch::new_empty(schema.clone()); + Self { + schema, + version: 0, + merged, + updates, + } + } + + pub fn add_batch(&mut self, batch: RecordBatch) -> error_stack::Result<(), Error> { + if batch.num_rows() == 0 { + return Ok(()); + } + + let merged = if self.merged.num_rows() == 0 { + batch + } else { + homogeneous_merge(&self.schema, vec![self.merged.clone(), batch]) + .into_report() + .change_context(Error::Internal("add_batch"))? + }; + + self.version += 1; + self.updates + .send((self.version, merged.clone())) + .into_report() + .change_context(Error::Internal("add_batch"))?; + self.merged = merged; + Ok(()) + } + + pub fn stream(&self) -> BoxStream<'static, RecordBatch> { + let mut version = self.version; + let mut batches = self.updates.subscribe(); + let merged = self.merged.clone(); + Box::pin(async_stream::stream!
{ + tracing::info!("Starting subscriber with version {version}"); + yield merged; + while let Ok((recv_version, batch)) = batches.recv().await { + if version < recv_version { + tracing::info!("Received version {recv_version} (prev: {version})"); + yield batch; + version = recv_version; + } else { + tracing::warn!("Ignoring version {recv_version} (already up to {version})") + } + } + }) + } +} diff --git a/crates/sparrow-compiler/src/time_domain.rs b/crates/sparrow-compiler/src/time_domain.rs index 9d375d3c2..f1a99e7d4 100644 --- a/crates/sparrow-compiler/src/time_domain.rs +++ b/crates/sparrow-compiler/src/time_domain.rs @@ -1,7 +1,7 @@ use anyhow::Context; use egg::Id; use itertools::Itertools; -use sparrow_plan::TableId; +use sparrow_instructions::TableId; use sparrow_syntax::{Located, Location}; use crate::{AstDfgRef, DataContext, DiagnosticBuilder, DiagnosticCode}; @@ -11,7 +11,7 @@ use crate::{AstDfgRef, DataContext, DiagnosticBuilder, DiagnosticCode}; /// It is used to report warnings about operations between incompatible /// domains. #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub(super) enum TimeDomain { +pub enum TimeDomain { /// The TimeDomain represents an error. Error, /// The TimeDomain represents a continuous value, such as a literal or @@ -39,6 +39,10 @@ impl TimeDomain { pub fn error() -> Self { TimeDomain::Error } + + pub fn is_continuous(&self) -> bool { + matches!(self, TimeDomain::Continuous) + } } /// Return the `TimeDomain` of an operation combining the given inputs. diff --git a/crates/sparrow-compiler/src/types/inference.rs b/crates/sparrow-compiler/src/types/inference.rs index 38d4e7d8b..fa306cc7f 100644 --- a/crates/sparrow-compiler/src/types/inference.rs +++ b/crates/sparrow-compiler/src/types/inference.rs @@ -1,9 +1,9 @@ //! Utilities for working with argument and parameter types. -use std::{cmp::Ordering, sync::Arc}; +use std::cmp::Ordering; -use arrow::datatypes::{DataType, Field, Fields, TimeUnit}; -use hashbrown::hash_map::Entry; +use anyhow::Context; +use arrow::datatypes::{DataType, Field, TimeUnit}; use hashbrown::HashMap; use itertools::{izip, Itertools}; use sparrow_syntax::{ @@ -104,6 +104,7 @@ pub(crate) fn instantiate( .iter() .map(|parameter_type| instantiate_type(parameter_type, &solutions)) .collect(); + let instantiated_arguments = arguments.with_values(instantiated_arguments); let instantiated_return = if instantiated_arguments.iter().any(|t| t.is_error()) { FenlType::Error @@ -136,107 +137,11 @@ pub fn validate_instantiation( let mut types_for_variable: HashMap<TypeVariable, FenlType> = HashMap::new(); for (argument_type, parameter_type) in izip!(argument_types.iter(), parameters.types()) { - if matches!(argument_type, FenlType::Concrete(DataType::Null)) { - // Skip -- null arguments satisfy any parameter. - continue; - } - - match parameter_type.inner() { - FenlType::TypeRef(type_var) => { - match types_for_variable.entry(type_var.clone()) { - Entry::Occupied(occupied) => { - // When validating, we assume that all uses of a type class are - // the same. This should be the case for the DFG and plan, since - // explicit casts have been added.
- anyhow::ensure!( - occupied.get() == argument_type - || matches!(occupied.get(), FenlType::Error) - || matches!(argument_type, FenlType::Error), - "Failed type validation: expected {} but was {}", - occupied.get(), - argument_type - ); - } - Entry::Vacant(vacant) => { - vacant.insert(argument_type.clone()); - } - } - } - FenlType::Collection(Collection::Map, type_vars) => { - debug_assert!(type_vars.len() == 2); - let (key_type, value_type) = match argument_type { - FenlType::Concrete(DataType::Map(f, _)) => match f.data_type() { - DataType::Struct(fields) => { - debug_assert!(fields.len() == 2); - ( - FenlType::Concrete(fields[0].data_type().clone()), - FenlType::Concrete(fields[1].data_type().clone()), - ) - } - other => anyhow::bail!("expected struct, saw {:?}", other), - }, - other => anyhow::bail!("expected map, saw {:?}", other), - }; - - match types_for_variable.entry(type_vars[0].clone()) { - Entry::Occupied(occupied) => { - anyhow::ensure!( - occupied.get() == &key_type - || matches!(occupied.get(), FenlType::Error) - || matches!(key_type, FenlType::Error), - "Failed type validation: expected {} but was {}", - occupied.get(), - key_type - ); - } - Entry::Vacant(vacant) => { - vacant.insert(key_type.clone()); - } - } - - match types_for_variable.entry(type_vars[1].clone()) { - Entry::Occupied(occupied) => { - anyhow::ensure!( - occupied.get() == &value_type - || matches!(occupied.get(), FenlType::Error) - || matches!(value_type, FenlType::Error), - "Failed type validation: expected {} but was {}", - occupied.get(), - value_type - ); - } - Entry::Vacant(vacant) => { - vacant.insert(value_type.clone()); - } - } - } - FenlType::Collection(Collection::List, _) => { - todo!("list unsupported") - } - FenlType::Error => { - // Assume the argument matches (since we already reported what - // caused the error to appear). We may be able - // to tighten this by *ignoring* the errors during type - // inference, and just treating empty lists as - // OK if there was an error (and returning an error). Then - // `i64 + error` would assume it would be `i64`. But that could - // be invalidated if after correcting the error - // it changed to `f64`. For now, we just let the error swallow - // other errors. - } - _ => { - anyhow::ensure!( - parameter_type.inner() == argument_type - || matches!( - argument_type, - FenlType::Error | FenlType::Concrete(DataType::Null) - ), - "Failed type validation: expected {} but was {}", - parameter_type, - argument_type - ); - } - }; + validate_type( + &mut types_for_variable, + parameter_type.inner(), + argument_type, + )?; } if argument_types.iter().any(|t| t.is_error()) { @@ -247,6 +152,59 @@ pub fn validate_instantiation( Ok(instantiated_return) } +fn validate_type( + types_for_variable: &mut HashMap<TypeVariable, FenlType>, + parameter: &FenlType, + argument: &FenlType, +) -> anyhow::Result<()> { + match (parameter, argument) { + (FenlType::Error, _) | (_, FenlType::Error) => { + // Assume the argument matches (since we already reported what + // caused the error to appear). We may be able + // to tighten this by *ignoring* the errors during type + // inference, and just treating empty lists as + // OK if there was an error (and returning an error). Then + // `i64 + error` would assume it would be `i64`. But that could + // be invalidated if after correcting the error + // it changed to `f64`. For now, we just let the error swallow + // other errors. + } + (_, FenlType::Concrete(DataType::Null)) => { + // Null arguments satisfy any parameter.
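+ // e.g. (illustrative) validating arguments `(i64, null)` against + // parameters `(a: i64, b: i64)` succeeds: the null argument is accepted + // wherever a concrete type is expected.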
+ } + (FenlType::TypeRef(type_var), actual) => { + let expected = types_for_variable + .entry_ref(type_var) + .or_insert_with(|| actual.clone()); + anyhow::ensure!( + expected == actual, + "Failed type validation: expected {expected} but was {actual}" + ); + } + (FenlType::Collection(p_coll, p_args), argument) => { + let a_args = argument.collection_args(p_coll).with_context(|| { + format!( + "Failed type validation: Expected collection type {p_coll} but got {argument}" + ) + })?; + anyhow::ensure!( + p_args.len() == a_args.len(), + "Failed type validation: Mismatched number of type arguments {p_args:?} and {a_args:?}" + ); + for (p_arg, a_arg) in izip!(p_args, a_args) { + validate_type(types_for_variable, p_arg, &a_arg)?; + } + } + (expected, actual) => { + anyhow::ensure!( + actual == expected, + "Failed type validation: expected {expected} but was {actual}" + ); + } + } + Ok(()) +} + /// Determine the type for a type class based on the associated argument types. /// /// # Fails @@ -332,34 +290,19 @@ fn instantiate_type(fenl_type: &FenlType, solutions: &HashMap<TypeVariable, FenlType>) -> FenlType { FenlType::TypeRef(type_var) => solutions .get(type_var) .cloned() - .unwrap_or(FenlType::Concrete(DataType::Null)), - FenlType::Collection(Collection::Map, type_vars) => { - debug_assert!(type_vars.len() == 2); - - let concrete_key_type = solutions - .get(&type_vars[0]) - .cloned() - .unwrap_or(FenlType::Concrete(DataType::Null)); - let concrete_value_type = solutions - .get(&type_vars[1]) - .cloned() - .unwrap_or(FenlType::Concrete(DataType::Null)); - - // `solutions` map should contain concrete types for all type variables. - let key_field = match concrete_key_type { - FenlType::Concrete(t) => Field::new("key", t, false), - other => panic!("expected concrete type, got {:?}", other), - }; - let value_field = match concrete_value_type { - FenlType::Concrete(t) => Field::new("value", t, false), - other => panic!("expected concrete type, got {:?}", other), + .unwrap_or_else(|| panic!("missing solution for type variable {:?}", type_var)), + FenlType::Collection(collection, type_args) => { + match collection { + Collection::List => debug_assert!(type_args.len() == 1), + Collection::Map => debug_assert!(type_args.len() == 2), }; - let fields = Fields::from(vec![key_field, value_field]); - let s = Arc::new(Field::new("entries", DataType::Struct(fields), false)); - FenlType::Concrete(DataType::Map(s, false)) + let type_args = type_args + .iter() + .map(|t| instantiate_type(t, solutions)) + .collect(); + FenlType::Collection(*collection, type_args).normalize() } - FenlType::Collection(Collection::List, _) => todo!("unsupported"), FenlType::Concrete(_) => fenl_type.clone(), FenlType::Window => fenl_type.clone(), FenlType::Json => fenl_type.clone(), @@ -724,16 +667,16 @@ impl TypeConstraints { self.0.entry_ref(variable).or_default().push(ty) } - fn constrain_all( + fn unify_all( &mut self, - variable: &[TypeVariable], - types: Vec<Located<FenlType>>, + parameters: &[FenlType], + arguments: Vec<Located<FenlType>>, ) -> Result<(), ()> { - if variable.len() != types.len() { + if parameters.len() != arguments.len() { return Err(()); } - for (variable, ty) in variable.iter().zip(types) { - self.constrain(variable, ty); + for (parameter, argument) in parameters.iter().zip(arguments) { + self.unify_one(parameter, argument)?; } Ok(()) } @@ -742,53 +685,17 @@ impl TypeConstraints { fn unify_one(&mut self, parameter: &FenlType, argument: Located<FenlType>) -> Result<(), ()> { match (parameter, argument.inner()) { (FenlType::TypeRef(variable), _) => self.constrain(variable, argument), - (FenlType::Collection(p_collection,
p_type_vars), arg_type) => match arg_type { - FenlType::Concrete(DataType::List(field)) => { - debug_assert_eq!( - p_type_vars.len(), - 1, - "List type must have one type variable" - ); - self.constrain_all( - p_type_vars, - vec![argument.with_value(FenlType::Concrete(field.data_type().clone()))], - )?; - } - FenlType::Concrete(DataType::Map(s, _)) => { - debug_assert_eq!( - p_type_vars.len(), - 2, - "Map type must have two type variables" - ); - let DataType::Struct(fields) = s.data_type() else { - panic!("Map type has a struct type with key/value") - }; - debug_assert_eq!(fields.len(), 2, "Map type struct should have two fields"); - self.constrain_all( - p_type_vars, - vec![ - argument.with_value(FenlType::Concrete(fields[0].data_type().clone())), - argument.with_value(FenlType::Concrete(fields[1].data_type().clone())), - ], - )?; - } - FenlType::Collection(a_collection, a_type_vars) => { - if a_collection != p_collection { - return Err(()); - } + (FenlType::Collection(p_collection, p_type_vars), arg_type) => { + let Some(args) = arg_type.collection_args(p_collection) else { + return Err(()); + }; - self.constrain_all( - p_type_vars, - a_type_vars - .iter() - .map(|a_type_var| { - argument.with_value(FenlType::TypeRef(a_type_var.clone())) - }) - .collect(), - )?; - } - _ => return Err(()), - }, + let arguments = args + .into_iter() + .map(|arg| argument.with_value(arg)) + .collect(); + self.unify_all(p_type_vars, arguments)?; + } (_, FenlType::Error) => { // The argument is an error, but we already reported it. // Don't need to do anything. @@ -851,7 +758,6 @@ impl TypeConstraints { #[cfg(test)] mod tests { use std::borrow::Cow; - use std::str::FromStr; use sparrow_syntax::{FeatureSetPart, FenlType, Resolved, Signature}; @@ -867,9 +773,10 @@ mod tests { let argument_types = argument_types .iter() .map(|s| { + let part_id = FeatureSetPart::Internal(s); Located::internal_str(s).transform(|s| { - FenlType::from_str(s) - .unwrap_or_else(|e| panic!("'{s}' is not valid as a type: {e}")) + FenlType::try_from_str(part_id, s) - .unwrap_or_else(|e| panic!("'{s}' is not valid as a type: {e:?}")) }) }) .collect(); @@ -967,4 +874,46 @@ mod tests { Ok("(key: i32, map: map<i32, i32>) -> i32".to_owned()) ); } + + #[test] + fn test_instantiate_flatten() { + const FLATTEN_SIGNATURE: &str = "flatten(list: list<list<T>>) -> list<T>"; + + assert_eq!( + instantiate_types(FLATTEN_SIGNATURE, &["list<list<i64>>"]), + Ok("(list: list<list<i64>>) -> list<i64>".to_owned()) + ); + } + + fn validate_instantiation( + signature_str: &'static str, + argument_types: &[&'static str], + ) -> anyhow::Result<FenlType> { + let signature = signature(signature_str); + let argument_types = argument_types + .iter() + .map(|s| { + let part_id = FeatureSetPart::Internal(s); + FenlType::try_from_str(part_id, s) + .unwrap_or_else(|e| panic!("'{s}' is not valid as a type: {e:?}")) + }) + .collect(); + + let argument_types = Resolved::new( + Cow::Owned(signature.parameters().names().to_owned()), + argument_types, + signature.parameters().has_vararg, + ); + super::validate_instantiation(&argument_types, &signature) + } + + #[test] + fn test_length_validation() { + const LENGTH_SIGNATURE: &str = "length(list: list<T>) -> i32"; + + assert_eq!( + validate_instantiation(LENGTH_SIGNATURE, &["list<i64>"]).unwrap(), + FenlType::Concrete(DataType::Int32) + ) + } } diff --git a/crates/sparrow-compiler/src/types/instruction.rs b/crates/sparrow-compiler/src/types/instruction.rs index 7128c1f56..08de8c816 100644 --- a/crates/sparrow-compiler/src/types/instruction.rs +++
b/crates/sparrow-compiler/src/types/instruction.rs @@ -5,8 +5,8 @@ use arrow::datatypes::{DataType, Field}; use itertools::{izip, Itertools}; use sparrow_arrow::scalar_value::ScalarValue; use sparrow_instructions::CastEvaluator; -use sparrow_plan::{InstKind, Mode}; -use sparrow_syntax::{ArgVec, FenlType, Resolved}; +use sparrow_instructions::InstKind; +use sparrow_syntax::{ArgVec, Collection, FenlType, Resolved}; use crate::types::inference::validate_instantiation; @@ -20,11 +20,10 @@ pub(crate) fn typecheck_inst( inst: &InstKind, argument_types: ArgVec<FenlType>, argument_literals: &[Option<ScalarValue>], - mode: Mode, ) -> anyhow::Result<FenlType> { match inst { InstKind::Simple(instruction) => { - let signature = instruction.signature(mode); + let signature = instruction.signature(); let argument_types = Resolved::new( Cow::Borrowed(signature.parameters().names()), argument_types, @@ -33,6 +32,16 @@ pub(crate) fn typecheck_inst( validate_instantiation(&argument_types, signature) } + InstKind::Udf(udf) => { + let signature = udf.signature(); + let argument_types = Resolved::new( + Cow::Owned(signature.parameters().names().to_owned()), + argument_types, + signature.parameters().has_vararg, + ); + + validate_instantiation(&argument_types, signature) + } InstKind::FieldRef => { anyhow::ensure!( argument_types.len() == 2, @@ -54,32 +63,7 @@ pub(crate) fn typecheck_inst( ); }; - if let FenlType::Concrete(DataType::Struct(fields)) = &argument_types[0] { - // TODO: Handle nullability? - let result_type = fields - .iter() - .find_map(|field| { - if field.name() == field_name { - Some(FenlType::Concrete(field.data_type().clone())) - } else { - None - } - }) - .with_context(|| { - format!( - "No field named '{}' found in struct {:?}", - field_name, argument_types[0] - ) - })?; - - Ok(result_type) - } else { - Err(anyhow!( - "Unable to access field {} of type {:?}", - field_name, - argument_types[0] - )) - } + typecheck_field_ref(field_name, &argument_types[0]) } InstKind::Cast(data_type) => { anyhow::ensure!( @@ -139,3 +123,32 @@ } } } + +fn typecheck_field_ref(field_name: &str, base: &FenlType) -> anyhow::Result<FenlType> { + if let FenlType::Concrete(DataType::Struct(fields)) = base { + // TODO: Handle nullability?
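+ // Note (see the `collection_args` branch below): field access distributes + // over lists, so e.g. (illustrative) `.x` on a list of structs containing an + // `x: i64` field type-checks to `list<i64>`.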
+ let result_type = fields + .iter() + .find_map(|field| { + if field.name() == field_name { + Some(FenlType::Concrete(field.data_type().clone())) + } else { + None + } + }) + .with_context(|| { + format!("No field named '{}' found in struct {:?}", field_name, base) + })?; + + Ok(result_type) + } else if let Some(args) = base.collection_args(&sparrow_syntax::Collection::List) { + let field = typecheck_field_ref(field_name, &args[0])?; + Ok(FenlType::Collection(Collection::List, vec![field]).normalize()) + } else { + Err(anyhow!( + "Unable to access field {} of type {:?}", + field_name, + base + )) + } +} diff --git a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__count_since_window.snap b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__count_since_window.snap index e539af234..79515b038 100644 --- a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__count_since_window.snap +++ b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__count_since_window.snap @@ -48,12 +48,11 @@ operations: - arguments: [] result_type: kind: - Primitive: 6 + Primitive: 1 output: false operator: Literal: - literal: - Int64: 1 + literal: ~ - arguments: [] result_type: kind: diff --git a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_diff_grouping.snap b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_diff_grouping.snap index 87f4a49cf..88330bdf2 100644 --- a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_diff_grouping.snap +++ b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_diff_grouping.snap @@ -227,8 +227,11 @@ operations: result_type: kind: List: - kind: - Primitive: 10 + name: item + item_type: + kind: + Primitive: 10 + nullable: true output: true operator: Input: @@ -251,8 +254,11 @@ operations: result_type: kind: List: - kind: - Primitive: 10 + name: item + item_type: + kind: + Primitive: 10 + nullable: true output: true operator: Input: diff --git a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_same_grouping.snap b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_same_grouping.snap index d9f029d3a..9970a70fb 100644 --- a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_same_grouping.snap +++ b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_same_grouping.snap @@ -217,8 +217,11 @@ operations: result_type: kind: List: - kind: - Primitive: 10 + name: item + item_type: + kind: + Primitive: 10 + nullable: true output: true operator: Input: @@ -241,8 +244,11 @@ operations: result_type: kind: List: - kind: - Primitive: 10 + name: item + item_type: + kind: + Primitive: 10 + nullable: true output: true operator: Input: diff --git a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_same_grouping_with_slicing.snap b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_same_grouping_with_slicing.snap index c1d5b07ba..72ae1e7ef 100644 --- a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_same_grouping_with_slicing.snap +++ b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__lookup_same_grouping_with_slicing.snap @@ -219,8 +219,11 @@ operations: result_type: kind: List: - kind: - Primitive: 10 + name: item + item_type: + kind: + Primitive: 10 + nullable: true output: true operator: Input: @@ -243,8 +246,11 @@ operations: result_type: kind: List: - kind: - Primitive: 10 + name: item + item_type: + kind: + Primitive: 10 + 
nullable: true output: true operator: Input: diff --git a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__since_daily.snap b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__since_daily.snap index 9c96c1950..69dd3dd18 100644 --- a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__since_daily.snap +++ b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__since_daily.snap @@ -122,12 +122,11 @@ operations: - arguments: [] result_type: kind: - Primitive: 6 + Primitive: 1 output: false operator: Literal: - literal: - Int64: 1 + literal: ~ - arguments: [] result_type: kind: diff --git a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__since_daily_multiple_passes.snap b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__since_daily_multiple_passes.snap index 04051408a..09e53dc22 100644 --- a/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__since_daily_multiple_passes.snap +++ b/crates/sparrow-compiler/tests/snapshots/compiler_golden_tests__since_daily_multiple_passes.snap @@ -153,12 +153,11 @@ operations: - arguments: [] result_type: kind: - Primitive: 6 + Primitive: 1 output: false operator: Literal: - literal: - Int64: 1 + literal: ~ - arguments: [] result_type: kind: @@ -289,12 +288,11 @@ operations: - arguments: [] result_type: kind: - Primitive: 6 + Primitive: 1 output: false operator: Literal: - literal: - Int64: 1 + literal: ~ - arguments: [] result_type: kind: diff --git a/crates/sparrow-execution/Cargo.toml b/crates/sparrow-execution/Cargo.toml new file mode 100644 index 000000000..43c778775 --- /dev/null +++ b/crates/sparrow-execution/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "sparrow-execution" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +publish = false +description = """ +Implementations of the pipelines to be executed. +""" + +[dependencies] +derive_more.workspace = true +error-stack.workspace = true +parking_lot.workspace = true +sparrow-arrow = { path = "../sparrow-arrow" } +sparrow-physical = { path = "../sparrow-physical" } +sparrow-transforms = { path = "../sparrow-transforms" } +sparrow-scheduler = { path = "../sparrow-scheduler" } +tokio.workspace = true + +[dev-dependencies] +arrow-array.workspace = true +arrow-schema.workspace = true +index_vec.workspace = true +sparrow-testing = { path = "../sparrow-testing" } + +[lib] +doctest = false diff --git a/crates/sparrow-execution/src/lib.rs b/crates/sparrow-execution/src/lib.rs new file mode 100644 index 000000000..763ff950f --- /dev/null +++ b/crates/sparrow-execution/src/lib.rs @@ -0,0 +1,256 @@ +#![warn( + rust_2018_idioms, + nonstandard_style, + future_incompatible, + clippy::mod_module_files, + clippy::print_stdout, + clippy::print_stderr, + clippy::undocumented_unsafe_blocks +)] + +//! Implementations of the pipelines to be executed. 
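+//! +//! Rough shape (illustrative, mirroring the test below): a source feeds +//! `Batch`es into a `TransformPipeline` (scan + transforms) scheduled on a +//! `WorkerPool`, and a sink pipeline receives the results.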
+ +#[cfg(test)] +mod tests { + + use std::sync::Arc; + + use arrow_array::cast::AsArray; + use arrow_array::{Int64Array, RecordBatch, TimestampNanosecondArray, UInt64Array}; + use arrow_schema::{DataType, Field, Schema, SchemaRef}; + use error_stack::{IntoReport, ResultExt}; + use index_vec::index_vec; + use parking_lot::Mutex; + use sparrow_arrow::scalar_value::ScalarValue; + use sparrow_arrow::{Batch, RowTime}; + use sparrow_scheduler::{Pipeline, PipelineError, PipelineInput, WorkerPool}; + use sparrow_transforms::TransformPipeline; + + #[derive(derive_more::Display, Debug)] + pub enum Error { + #[display(fmt = "error creating executor")] + Creating, + #[display(fmt = "error executing")] + Executing, + } + + impl error_stack::Context for Error {} + + #[tokio::test] + #[ignore] + async fn test_query() { + sparrow_testing::init_test_logging(); + + let (input_tx, input_rx) = tokio::sync::mpsc::channel(10); + let (output_tx, mut output_rx) = tokio::sync::mpsc::channel(10); + + let input_schema = Arc::new(Schema::new(vec![ + Field::new("a", DataType::Int64, true), + Field::new("b", DataType::Int64, true), + Field::new("c", DataType::Int64, true), + ])); + + let output_schema = Arc::new(Schema::new(vec![ + Field::new("ab", DataType::Int64, true), + Field::new("abc", DataType::Int64, true), + ])); + + let input_batch = RecordBatch::try_new( + input_schema.clone(), + vec![ + Arc::new(Int64Array::from(vec![0, 1, 2, 3])), + Arc::new(Int64Array::from(vec![4, 7, 10, 11])), + Arc::new(Int64Array::from(vec![Some(21), None, Some(387), Some(87)])), + ], + ) + .unwrap(); + let time = Arc::new(TimestampNanosecondArray::from(vec![0, 1, 2, 3])); + let subsort = Arc::new(UInt64Array::from(vec![0, 1, 2, 3])); + let key_hash = Arc::new(UInt64Array::from(vec![0, 1, 2, 3])); + let input_batch = Batch::new_with_data( + input_batch, + time, + subsort, + key_hash, + RowTime::from_timestamp_ns(3), + ); + input_tx.send(input_batch).await.unwrap(); + std::mem::drop(input_tx); + + execute( + "hello".to_owned(), + input_schema, + input_rx, + output_schema, + output_tx, + ) + .await + .unwrap(); + + let output = output_rx.recv().await.unwrap(); + let output = output.into_record_batch().unwrap(); + let ab = output.column_by_name("ab").unwrap(); + let abc = output.column_by_name("abc").unwrap(); + assert_eq!(ab.as_primitive(), &Int64Array::from(vec![4, 8, 12, 14])); + assert_eq!( + abc.as_primitive(), + &Int64Array::from(vec![Some(25), None, Some(399), Some(101)]) + ); + } + + /// Execute a physical plan. + pub async fn execute( + query_id: String, + input_schema: SchemaRef, + mut input: tokio::sync::mpsc::Receiver<Batch>, + output_schema: SchemaRef, + output: tokio::sync::mpsc::Sender<Batch>, + ) -> error_stack::Result<(), Error> { + let mut worker_pool = WorkerPool::start(query_id).change_context(Error::Creating)?; + + // This sets up some fake stuff: + // - We don't have sources / sinks yet, so we use tokio channels. + // - We create a "hypothetical" scan step (0) + // - We create a hard-coded "project" step (1) + // - We output the results to the channel.
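+ // + // The hard-coded project step evaluates (illustrative): + // expr 0: column "a", expr 1: column "b", expr 2: add(0, 1) = ab, + // expr 3: column "c", expr 4: add(2, 3) = abc, + // with `outputs: [2, 4]` selecting `ab` and `abc` for the output schema.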
+ + let scan = sparrow_physical::Step { + id: 0.into(), + kind: sparrow_physical::StepKind::Scan { + table_name: "table".to_owned(), + }, + inputs: vec![], + schema: input_schema, + }; + + let project = sparrow_physical::Step { + id: 1.into(), + kind: sparrow_physical::StepKind::Project { + exprs: sparrow_physical::Exprs { + exprs: index_vec![ + sparrow_physical::Expr { + name: "column".into(), + literal_args: vec![ScalarValue::Utf8(Some("a".to_owned()))], + args: vec![], + result_type: DataType::Int64 + }, + sparrow_physical::Expr { + name: "column".into(), + literal_args: vec![ScalarValue::Utf8(Some("b".to_owned()))], + args: vec![], + result_type: DataType::Int64 + }, + sparrow_physical::Expr { + name: "add".into(), + literal_args: vec![], + args: vec![0.into(), 1.into()], + result_type: DataType::Int64 + }, + sparrow_physical::Expr { + name: "column".into(), + literal_args: vec![ScalarValue::Utf8(Some("c".to_owned()))], + args: vec![], + result_type: DataType::Int64 + }, + sparrow_physical::Expr { + name: "add".into(), + literal_args: vec![], + args: vec![2.into(), 3.into()], + result_type: DataType::Int64 + }, + ], + outputs: vec![2.into(), 4.into()], + }, + }, + inputs: vec![0.into()], + schema: output_schema, + }; + + let sink_pipeline = worker_pool.add_pipeline(1, WriteChannelPipeline::new(output)); + let transform_pipeline = worker_pool.add_pipeline( + 1, + TransformPipeline::try_new( + &scan, + vec![project].iter(), + PipelineInput::new(sink_pipeline, 0), + ) + .change_context(Error::Creating)?, + ); + let transform_pipeline_input = PipelineInput::new(transform_pipeline, 0); + + let mut injector = worker_pool.injector().clone(); + while let Some(batch) = input.recv().await { + transform_pipeline_input + .add_input(0.into(), batch, &mut injector) + .change_context(Error::Executing)?; + } + transform_pipeline_input + .close_input(0.into(), &mut injector) + .change_context(Error::Executing)?; + worker_pool.stop().change_context(Error::Executing)?; + + Ok(()) + } + + #[derive(Debug)] + struct WriteChannelPipeline(Mutex<Option<tokio::sync::mpsc::Sender<Batch>>>); + + impl WriteChannelPipeline { + fn new(channel: tokio::sync::mpsc::Sender<Batch>) -> Self { + Self(Mutex::new(Some(channel))) + } + } + + impl Pipeline for WriteChannelPipeline { + fn initialize( + &mut self, + _tasks: sparrow_scheduler::Partitioned<sparrow_scheduler::TaskRef>, + ) { + } + + fn add_input( + &self, + input_partition: sparrow_scheduler::Partition, + input: usize, + batch: Batch, + _scheduler: &mut dyn sparrow_scheduler::Scheduler, + ) -> error_stack::Result<(), PipelineError> { + let channel = self.0.lock(); + channel + .as_ref() + .ok_or(PipelineError::InputClosed { + input, + input_partition, + })?
+ .blocking_send(batch) + .into_report() + .change_context(PipelineError::Execution) + } + + fn close_input( + &self, + input_partition: sparrow_scheduler::Partition, + input: usize, + _scheduler: &mut dyn sparrow_scheduler::Scheduler, + ) -> error_stack::Result<(), PipelineError> { + let mut channel = self.0.lock(); + error_stack::ensure!( + channel.is_some(), + PipelineError::InputClosed { + input, + input_partition, + }, + ); + *channel = None; + Ok(()) + } + + fn do_work( + &self, + _partition: sparrow_scheduler::Partition, + _scheduler: &mut dyn sparrow_scheduler::Scheduler, + ) -> error_stack::Result<(), PipelineError> { + Ok(()) + } + } +} diff --git a/crates/sparrow-expressions/src/error.rs b/crates/sparrow-expressions/src/error.rs index 6f25e80a2..69bd39151 100644 --- a/crates/sparrow-expressions/src/error.rs +++ b/crates/sparrow-expressions/src/error.rs @@ -10,6 +10,11 @@ pub enum Error { expected: DataType, actual: DataType, }, + #[display(fmt = "invalid result type: expected {expected:?} but was {actual:?}")] + InvalidResultType { + expected: DataType, + actual: DataType, + }, #[display(fmt = "invalid argument type: expected struct but was {actual:?}")] InvalidNonStructArgumentType { actual: DataType }, #[display(fmt = "invalid result type: expected struct but was {actual:?}")] diff --git a/crates/sparrow-expressions/src/evaluators.rs b/crates/sparrow-expressions/src/evaluators.rs index 7ee9be43c..b8c56920f 100644 --- a/crates/sparrow-expressions/src/evaluators.rs +++ b/crates/sparrow-expressions/src/evaluators.rs @@ -94,11 +94,11 @@ impl<'a> StaticInfo<'a> { /// Return the scalar value corresponding to the exactly-one literal arguments. fn literal(&self) -> error_stack::Result<&'a ScalarValue, Error> { error_stack::ensure!( - self.args.len() == 1, + self.literal_args.len() == 1, Error::InvalidLiteralCount { name: self.name.clone(), expected: 1, - actual: self.args.len() + actual: self.literal_args.len() } ); Ok(&self.literal_args[0]) diff --git a/crates/sparrow-expressions/src/evaluators/column.rs b/crates/sparrow-expressions/src/evaluators/column.rs index f74742bc8..e68ce27a8 100644 --- a/crates/sparrow-expressions/src/evaluators/column.rs +++ b/crates/sparrow-expressions/src/evaluators/column.rs @@ -6,7 +6,7 @@ use crate::Error; inventory::submit!(crate::evaluators::EvaluatorFactory { name: "column", - create: &create + create: &create, }); /// Evaluator for column reference (`.c`).
@@ -26,7 +26,13 @@ fn create(info: super::StaticInfo<'_>) -> error_stack::Result<Box<dyn Evaluator>, Error> .input_schema .column_with_name(name) .expect("missing column"); - debug_assert_eq!(field.data_type(), info.result_type); + error_stack::ensure!( + field.data_type() == info.result_type, + Error::InvalidResultType { + expected: field.data_type().clone(), + actual: info.result_type.clone() + } + ); Ok(Box::new(ColumnEvaluator { column })) } diff --git a/crates/sparrow-expressions/src/evaluators/hash.rs b/crates/sparrow-expressions/src/evaluators/hash.rs index 89910db1a..d49177e70 100644 --- a/crates/sparrow-expressions/src/evaluators/hash.rs +++ b/crates/sparrow-expressions/src/evaluators/hash.rs @@ -1,9 +1,7 @@ use std::sync::Arc; -use arrow_array::{ArrayRef, UInt64Array}; -use arrow_buffer::Buffer; -use error_stack::{IntoReport, ResultExt}; -use sparrow_arrow::hasher::Hasher; +use arrow_array::ArrayRef; +use error_stack::ResultExt; use crate::evaluator::Evaluator; use crate::evaluators::StaticInfo; @@ -11,8 +9,6 @@ use crate::values::ArrayRefValue; use crate::work_area::WorkArea; use crate::Error; -use std::cell::RefCell; - inventory::submit!(crate::evaluators::EvaluatorFactory { name: "hash", create: &create @@ -23,27 +19,11 @@ struct HashEvaluator { input: ArrayRefValue, } -thread_local! { - /// Thread-local hasher. - /// - /// TODO: Move this to the hasher and make it easy to automatically - /// use this instance. - static HASHER: RefCell<Hasher> = RefCell::new(Hasher::default()); -} - impl Evaluator for HashEvaluator { fn evaluate(&self, info: &WorkArea<'_>) -> error_stack::Result<ArrayRef, Error> { let input = info.expression(self.input); - let hashes = HASHER.with(|hasher| -> error_stack::Result<UInt64Array, Error> { - let mut hasher = hasher.borrow_mut(); - let hashes = hasher - .hash_array(input.as_ref()) - .change_context(Error::ExprEvaluation)?; - let hashes = Buffer::from_slice_ref(hashes); - UInt64Array::try_new(hashes.into(), None) - .into_report() - .change_context(Error::ExprEvaluation) - })?; + let hashes = + sparrow_arrow::hash::hash(input.as_ref()).change_context(Error::ExprEvaluation)?; Ok(Arc::new(hashes)) } diff --git a/crates/sparrow-instructions/Cargo.toml b/crates/sparrow-instructions/Cargo.toml index 4580bfd56..530aeb6e3 100644 --- a/crates/sparrow-instructions/Cargo.toml +++ b/crates/sparrow-instructions/Cargo.toml @@ -17,6 +17,7 @@ bincode.workspace = true bit-set.workspace = true bitvec.workspace = true chrono.workspace = true +enum-map.workspace = true erased-serde.workspace = true error-stack.workspace = true derive_more.workspace = true @@ -24,6 +25,7 @@ hashbrown.workspace = true itertools.workspace = true num.workspace = true owning_ref.workspace = true +parse-display.workspace = true prost.workspace = true prost-wkt-types.workspace = true rocksdb.workspace = true @@ -33,13 +35,15 @@ smallvec.workspace = true sparrow-api = { path = "../sparrow-api" } sparrow-arrow = { path = "../sparrow-arrow" } sparrow-kernels = { path = "../sparrow-kernels" } -sparrow-plan = { path = "../sparrow-plan" } sparrow-syntax = { path = "../sparrow-syntax" } static_init.workspace = true +strum.workspace = true +strum_macros.workspace = true tempfile.workspace = true tonic.workspace = true tracing.workspace = true lz4-sys.workspace = true +uuid.workspace = true [dev-dependencies] approx.workspace = true diff --git a/crates/sparrow-instructions/src/aggregation_args.rs b/crates/sparrow-instructions/src/aggregation_args.rs index f314953cb..6dfc71ca4 100644 --- a/crates/sparrow-instructions/src/aggregation_args.rs +++
b/crates/sparrow-instructions/src/aggregation_args.rs @@ -1,5 +1,5 @@ +use crate::ValueRef; use anyhow::anyhow; -use sparrow_plan::ValueRef; use crate::StaticArg; diff --git a/crates/sparrow-instructions/src/evaluators.rs b/crates/sparrow-instructions/src/evaluators.rs index 84276df3f..721f4c0e1 100644 --- a/crates/sparrow-instructions/src/evaluators.rs +++ b/crates/sparrow-instructions/src/evaluators.rs @@ -1,7 +1,7 @@ +use crate::{InstKind, InstOp, ValueRef}; use arrow::array::ArrayRef; use arrow::datatypes::DataType; use itertools::Itertools; -use sparrow_plan::{InstKind, InstOp}; use crate::evaluators::macros::{ create_float_evaluator, create_number_evaluator, create_ordered_evaluator, @@ -16,6 +16,7 @@ mod equality; mod field_ref; mod general; mod json_field; +mod list; mod logical; mod macros; mod map; @@ -31,11 +32,11 @@ use equality::*; use field_ref::*; use general::*; use json_field::*; +use list::*; use logical::*; use map::*; use math::*; use record::*; -use sparrow_plan::ValueRef; use string::*; use time::*; @@ -159,6 +160,22 @@ pub fn create_evaluator(info: StaticInfo<'_>) -> anyhow::Result Ok(RecordEvaluator::try_new(info)?), + InstKind::Udf(udf) => Ok(udf.make_evaluator(info)), + } +} + +/// Placeholder struct for unsupported evaluators. +struct UnsupportedEvaluator; + +impl Evaluator for UnsupportedEvaluator { + fn evaluate(&mut self, _info: &dyn RuntimeInfo) -> anyhow::Result { + anyhow::bail!("Unsupported evaluator") + } +} + +impl EvaluatorFactory for UnsupportedEvaluator { + fn try_new(_info: StaticInfo<'_>) -> anyhow::Result> { + anyhow::bail!("Unsupported evaluator") } } @@ -177,6 +194,18 @@ fn create_simple_evaluator( create_number_evaluator!(&info.args[0].data_type, ClampEvaluator, info) } InstOp::Coalesce => CoalesceEvaluator::try_new(info), + InstOp::Collect => { + create_typed_evaluator!( + &info.args[0].data_type, + CollectPrimitiveEvaluator, + CollectStructEvaluator, + UnsupportedEvaluator, + UnsupportedEvaluator, + CollectBooleanEvaluator, + CollectStringEvaluator, + info + ) + } InstOp::CountIf => CountIfEvaluator::try_new(info), InstOp::DayOfMonth => DayOfMonthEvaluator::try_new(info), InstOp::DayOfMonth0 => DayOfMonth0Evaluator::try_new(info), @@ -195,6 +224,8 @@ fn create_simple_evaluator( create_typed_evaluator!( &info.args[0].data_type, ArrowAggEvaluator, + UnsupportedEvaluator, + FirstListEvaluator, FirstMapEvaluator, FirstBooleanEvaluator, FirstStringEvaluator, @@ -202,8 +233,10 @@ fn create_simple_evaluator( info ) } + InstOp::Flatten => FlattenEvaluator::try_new(info), InstOp::Floor => FloorEvaluator::try_new(info), InstOp::Get => GetEvaluator::try_new(info), + InstOp::Index => IndexEvaluator::try_new(info), InstOp::Gt => match (info.args[0].is_literal(), info.args[1].is_literal()) { (_, true) => { create_ordered_evaluator!(&info.args[0].data_type, GtScalarEvaluator, info) @@ -239,13 +272,12 @@ fn create_simple_evaluator( // rely on simplification for conversion. 
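// NOTE (annotation, not part of the diff): judging from the call sites above,
// `create_typed_evaluator!` appears to dispatch on the Arrow `DataType` with
// evaluators listed in the order (primitive, struct, list, map, boolean,
// string); `collect` plugs `UnsupportedEvaluator` into the list and map slots.
// Roughly:
//
//     match data_type {
//         DataType::Boolean => CollectBooleanEvaluator::try_new(info),
//         DataType::Utf8 => CollectStringEvaluator::try_new(info),
//         DataType::Struct(_) => CollectStructEvaluator::try_new(info),
//         DataType::List(_) | DataType::Map(..) => UnsupportedEvaluator::try_new(info),
//         _ => /* numeric: CollectPrimitiveEvaluator::<T>::try_new(info) */
//     }
// (end annotation)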
InstOp::Json => anyhow::bail!("No evaluator defined for json function"), InstOp::JsonField => JsonFieldEvaluator::try_new(info), - InstOp::Lag => { - create_ordered_evaluator!(&info.args[1].data_type, PrimitiveLagEvaluator, info) - } InstOp::Last => { create_typed_evaluator!( &info.args[0].data_type, ArrowAggEvaluator, + UnsupportedEvaluator, + LastListEvaluator, LastMapEvaluator, LastBooleanEvaluator, LastStringEvaluator, @@ -254,6 +286,7 @@ fn create_simple_evaluator( ) } InstOp::Len => LenEvaluator::try_new(info), + InstOp::ListLen => ListLenEvaluator::try_new(info), InstOp::LogicalAnd => LogicalAndKleeneEvaluator::try_new(info), InstOp::LogicalOr => LogicalOrKleeneEvaluator::try_new(info), InstOp::Lower => LowerEvaluator::try_new(info), @@ -320,6 +353,7 @@ fn create_simple_evaluator( } InstOp::TimeOf => TimeOfEvaluator::try_new(info), InstOp::Upper => UpperEvaluator::try_new(info), + InstOp::Union => UnionEvaluator::try_new(info), InstOp::Variance => { create_number_evaluator!( &info.args[0].data_type, diff --git a/crates/sparrow-instructions/src/evaluators/aggregation.rs b/crates/sparrow-instructions/src/evaluators/aggregation.rs index e2abedd64..0ce7c1bce 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation.rs @@ -1,6 +1,7 @@ mod boolean; mod function; mod generic; +mod list; mod map; mod numeric_properties; mod primitive; @@ -11,6 +12,7 @@ mod two_stacks; pub use boolean::*; pub use function::*; pub use generic::*; +pub use list::*; pub use map::*; pub use numeric_properties::*; pub use primitive::*; diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/boolean/first_boolean_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/boolean/first_boolean_evaluator.rs index 33ee97565..7ad8155db 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/boolean/first_boolean_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/boolean/first_boolean_evaluator.rs @@ -1,9 +1,9 @@ use std::sync::Arc; +use crate::ValueRef; use arrow::array::{Array, ArrayRef, BooleanArray, UInt32Array}; use itertools::izip; use sparrow_arrow::downcast::downcast_boolean_array; -use sparrow_plan::ValueRef; use super::two_stacks_first_boolean_evaluator::TwoStacksFirstBooleanEvaluator; use crate::{ diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/boolean/last_boolean_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/boolean/last_boolean_evaluator.rs index d82c15bbc..83d4fcbe0 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/boolean/last_boolean_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/boolean/last_boolean_evaluator.rs @@ -1,9 +1,9 @@ use std::sync::Arc; +use crate::ValueRef; use arrow::array::{Array, ArrayRef, BooleanArray, UInt32Array}; use itertools::izip; use sparrow_arrow::downcast::downcast_boolean_array; -use sparrow_plan::ValueRef; use super::two_stacks_last_boolean_evaluator::TwoStacksLastBooleanEvaluator; use crate::{ diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/boolean/two_stacks_first_boolean_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/boolean/two_stacks_first_boolean_evaluator.rs index 272bf5283..4294fa041 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/boolean/two_stacks_first_boolean_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/boolean/two_stacks_first_boolean_evaluator.rs @@ 
-1,11 +1,11 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::anyhow; use arrow::array::{Array, ArrayRef, BooleanArray, UInt32Array}; use arrow::datatypes::Int64Type; use itertools::izip; use sparrow_arrow::downcast::downcast_boolean_array; -use sparrow_plan::ValueRef; use crate::{ AggregationArgs, Evaluator, FirstBoolean, RuntimeInfo, StateToken, TwoStacks, diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/boolean/two_stacks_last_boolean_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/boolean/two_stacks_last_boolean_evaluator.rs index 097adfd94..5166be660 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/boolean/two_stacks_last_boolean_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/boolean/two_stacks_last_boolean_evaluator.rs @@ -1,11 +1,11 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::anyhow; use arrow::array::{Array, ArrayRef, BooleanArray, UInt32Array}; use arrow::datatypes::Int64Type; use itertools::izip; use sparrow_arrow::downcast::downcast_boolean_array; -use sparrow_plan::ValueRef; use crate::{ AggregationArgs, Evaluator, LastBoolean, RuntimeInfo, StateToken, TwoStacks, diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/generic/count_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/generic/count_evaluator.rs index 42d21c604..0d8fc7f99 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/generic/count_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/generic/count_evaluator.rs @@ -1,10 +1,10 @@ use std::sync::Arc; +use crate::ValueRef; use arrow::array::{Array, ArrayRef, BooleanArray, PrimitiveArray, UInt32Array}; use arrow::datatypes::UInt32Type; use itertools::izip; use sparrow_arrow::downcast::downcast_boolean_array; -use sparrow_plan::ValueRef; use super::two_stacks_count_evaluator::TwoStacksCountIfEvaluator; use crate::{ diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/generic/two_stacks_count_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/generic/two_stacks_count_evaluator.rs index 94294bc7e..7c78935fe 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/generic/two_stacks_count_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/generic/two_stacks_count_evaluator.rs @@ -1,11 +1,11 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::anyhow; use arrow::array::{Array, ArrayRef, BooleanArray, PrimitiveArray, UInt32Array}; use arrow::datatypes::{Int64Type, UInt32Type}; use itertools::izip; use sparrow_arrow::downcast::downcast_boolean_array; -use sparrow_plan::ValueRef; use crate::{AggregationArgs, Count, Evaluator, RuntimeInfo, StateToken, TwoStacksCountAccumToken}; diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/list.rs b/crates/sparrow-instructions/src/evaluators/aggregation/list.rs new file mode 100644 index 000000000..19905c335 --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/aggregation/list.rs @@ -0,0 +1,7 @@ +//! List aggregation evaluators. 
+ +mod first_list_evaluator; +mod last_list_evaluator; + +pub use first_list_evaluator::*; +pub use last_list_evaluator::*; diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/list/first_list_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/list/first_list_evaluator.rs new file mode 100644 index 000000000..ba5c3ada2 --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/aggregation/list/first_list_evaluator.rs @@ -0,0 +1,197 @@ +use std::sync::Arc; + +use crate::ValueRef; +use crate::{ + AggregationArgs, Evaluator, EvaluatorFactory, ListAccumToken, RuntimeInfo, StateToken, + StaticInfo, +}; +use arrow::array::{ + as_list_array, new_empty_array, Array, ArrayRef, AsArray, PrimitiveArray, UInt32Array, +}; + +/// Evaluator for the `First` instruction on lists +pub struct FirstListEvaluator { + args: AggregationArgs, + token: ListAccumToken, +} + +impl Evaluator for FirstListEvaluator { + fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result { + match &self.args { + AggregationArgs::NoWindow { input } => { + let grouping = info.grouping(); + let input_vals = info.value(input)?.array_ref()?; + let result = Self::aggregate( + &mut self.token, + grouping.num_groups(), + grouping.group_indices(), + &input_vals, + ); + + result + } + AggregationArgs::Since { ticks: _, input: _ } => { + unimplemented!("windowed aggregation over lists") + } + AggregationArgs::Sliding { .. } => { + panic!("expected non-windowed or since-windowed aggregation, saw sliding.") + } + } + } + + fn state_token(&self) -> Option<&dyn StateToken> { + Some(&self.token) + } + + fn state_token_mut(&mut self) -> Option<&mut dyn StateToken> { + Some(&mut self.token) + } +} + +impl EvaluatorFactory for FirstListEvaluator { + fn try_new(info: StaticInfo<'_>) -> anyhow::Result> { + let args = AggregationArgs::from_input(info.args)?; + match args { + AggregationArgs::NoWindow { .. } | AggregationArgs::Since { .. } => { + let list_type = info.result_type; + let accum = new_empty_array(list_type).as_list::().to_owned(); + let token = ListAccumToken::new(Arc::new(accum)); + Ok(Box::new(Self { token, args })) + } + AggregationArgs::Sliding { .. } => { + unimplemented!("sliding window aggregation over list unsupported") + } + } + } +} + +impl FirstListEvaluator { + /// Resizes the accumulator to the new size. + fn ensure_entity_capacity(token: &mut ListAccumToken, len: usize) -> anyhow::Result<()> { + token.resize(len) + } + + /// Returns the existing value for an entity if it exists, or a new value from the + /// input if it exists, or null if neither. + /// + /// Takes advantage of the `take` and `concat` kernels to avoid having to type the + /// evaluator, keeping everything as ArrayRefs. + /// + /// The output is taken from the concatenated batch of the old state and the new input. + /// If the old state's value is null, then the take index for that entity is the length + /// of the old state plus the current index (i.e. the index into the new input). + /// If not, then we keep the take index as the old state's index. 
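// NOTE (annotation, not part of the diff): a standalone demonstration of the
// concat+take trick described above, on hypothetical flat data. The old state
// has one slot per entity; indices >= state.len() select from the new input.
//
//     use arrow::array::{AsArray, Int64Array, UInt32Array};
//     use arrow::compute::{concat, take};
//     use arrow::datatypes::Int64Type;
//
//     // Entity 0 has no first value yet; entity 1 already has 7.
//     let state = Int64Array::from(vec![None, Some(7)]);
//     let input = Int64Array::from(vec![Some(3), Some(9)]); // rows for entities 0, 1
//     let combined = concat(&[&state, &input]).unwrap();
//     // Entity 0 takes its first input (index 2 = state.len() + 0);
//     // entity 1 keeps its old value (index 1).
//     let out = take(combined.as_ref(), &UInt32Array::from(vec![2u32, 1]), None).unwrap();
//     assert_eq!(out.as_primitive::<Int64Type>(), &Int64Array::from(vec![Some(3), Some(7)]));
// (end annotation)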
+ fn aggregate( + token: &mut ListAccumToken, + key_capacity: usize, + key_indices: &UInt32Array, + input: &ArrayRef, + ) -> anyhow::Result { + Self::ensure_entity_capacity(token, key_capacity)?; + let list_input = as_list_array(input); + + let mut take_new_state: Vec = (0..token.accum.len() as u32).collect(); + let mut take_output_builder = UInt32Array::builder(input.len()); + for input_index in 0..list_input.len() { + let entity_index = key_indices.value(input_index); + if token.value_is_null(entity_index) && list_input.is_valid(input_index) { + // If the `take_new_state[entity_index]` is greater than the length, that + // means it has been set already, so we should not overwrite it. + let not_taken = take_new_state[entity_index as usize] < take_new_state.len() as u32; + if not_taken { + take_new_state[entity_index as usize] = + (input_index + take_new_state.len()) as u32; + } + }; + + take_output_builder.append_value(take_new_state[entity_index as usize]) + } + + // Gather the output, using the previous state and the new input + let output = + sparrow_arrow::concat_take(&token.accum, input, &take_output_builder.finish())?; + + // Update the state token with the new state + let take_new_state = PrimitiveArray::from_iter_values(take_new_state); + let new_state = sparrow_arrow::concat_take(&token.accum, input, &take_new_state)?; + token.set_state(new_state); + + Ok(output) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use arrow::array::{AsArray, Int64Builder, ListBuilder}; + use arrow_schema::{DataType, Field}; + use std::sync::Arc; + + fn default_token() -> ListAccumToken { + let f = Arc::new(Field::new("item", DataType::Int64, true)); + let list = DataType::List(f); + let accum = new_empty_array(&list); + ListAccumToken { accum } + } + + #[test] + fn test_first_list_multiple_batches() { + let mut token = default_token(); + let key_indices = UInt32Array::from(vec![0, 0, 0, 0, 0, 0]); + let key_capacity = 1; + + // Batch 1 + let mut builder = ListBuilder::new(Int64Builder::new()); + builder.append_value([Some(1), Some(2), Some(3)]); + builder.append_value([Some(4), None, Some(5)]); + builder.append_value([None, None]); + builder.append(false); + builder.append_value([]); + builder.append_value([Some(7), Some(8), Some(9)]); + + let array = builder.finish(); + + let input: ArrayRef = Arc::new(array); + let result = + FirstListEvaluator::aggregate(&mut token, key_capacity, &key_indices, &input).unwrap(); + let result = result.as_list(); + + let mut builder = ListBuilder::new(Int64Builder::new()); + for _ in 0..6 { + builder.append_value([Some(1), Some(2), Some(3)]); + } + let expected = builder.finish(); + + assert_eq!(&expected, result); + + // Batch 2 + let mut builder = ListBuilder::new(Int64Builder::new()); + builder.append_value([Some(10), Some(11)]); + builder.append(true); + builder.append_value([Some(13), None]); + builder.append(false); + builder.append(false); + builder.append_value([Some(14)]); + + let array = builder.finish(); + let input: ArrayRef = Arc::new(array); + + // Introduce more entities + let key_indices = UInt32Array::from(vec![0, 1, 2, 1, 0, 1]); + let key_capacity = 3; + let result = + FirstListEvaluator::aggregate(&mut token, key_capacity, &key_indices, &input).unwrap(); + let result = result.as_list(); + + let mut builder = ListBuilder::new(Int64Builder::new()); + builder.append_value([Some(1), Some(2), Some(3)]); + builder.append(true); + builder.append_value([Some(13), None]); + builder.append(true); + builder.append_value([Some(1), Some(2), Some(3)]); + 
builder.append(true);
+        let expected = builder.finish();
+
+        assert_eq!(&expected, result);
+    }
+}
diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/list/last_list_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/list/last_list_evaluator.rs
new file mode 100644
index 000000000..2a252b5b2
--- /dev/null
+++ b/crates/sparrow-instructions/src/evaluators/aggregation/list/last_list_evaluator.rs
@@ -0,0 +1,194 @@
+use std::sync::Arc;
+
+use crate::ValueRef;
+use crate::{
+    AggregationArgs, Evaluator, EvaluatorFactory, ListAccumToken, RuntimeInfo, StateToken,
+    StaticInfo,
+};
+use arrow::array::{
+    as_list_array, new_empty_array, Array, ArrayRef, AsArray, PrimitiveArray, UInt32Array,
+};
+
+/// Evaluator for the `Last` instruction on lists
+pub struct LastListEvaluator {
+    args: AggregationArgs,
+    token: ListAccumToken,
+}
+
+impl Evaluator for LastListEvaluator {
+    fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        match &self.args {
+            AggregationArgs::NoWindow { input } => {
+                let grouping = info.grouping();
+                let input_vals = info.value(input)?.array_ref()?;
+                let result = Self::aggregate(
+                    &mut self.token,
+                    grouping.num_groups(),
+                    grouping.group_indices(),
+                    &input_vals,
+                );
+
+                result
+            }
+            AggregationArgs::Since { ticks: _, input: _ } => {
+                unimplemented!("windowed aggregation over lists")
+            }
+            AggregationArgs::Sliding { .. } => {
+                panic!("expected non-windowed or since-windowed aggregation, saw sliding.")
+            }
+        }
+    }
+
+    fn state_token(&self) -> Option<&dyn StateToken> {
+        Some(&self.token)
+    }
+
+    fn state_token_mut(&mut self) -> Option<&mut dyn StateToken> {
+        Some(&mut self.token)
+    }
+}
+
+impl EvaluatorFactory for LastListEvaluator {
+    fn try_new(info: StaticInfo<'_>) -> anyhow::Result<Box<dyn Evaluator>> {
+        let args = AggregationArgs::from_input(info.args)?;
+        match args {
+            AggregationArgs::NoWindow { .. } | AggregationArgs::Since { .. } => {
+                let list_type = info.result_type;
+                let accum = new_empty_array(list_type).as_list::<i32>().to_owned();
+                let token = ListAccumToken::new(Arc::new(accum));
+                Ok(Box::new(Self { token, args }))
+            }
+            AggregationArgs::Sliding { .. } => {
+                unimplemented!("sliding window aggregation over list unsupported")
+            }
+        }
+    }
+}
+
+impl LastListEvaluator {
+    /// Resizes the accumulator to the new size.
+    fn ensure_entity_capacity(token: &mut ListAccumToken, len: usize) -> anyhow::Result<()> {
+        token.resize(len)
+    }
+
+    /// Returns the newest value from the input for an entity if one exists,
+    /// otherwise the existing value, or null if neither exists.
+    ///
+    /// Takes advantage of the `take` and `concat` kernels to avoid having to type the
+    /// evaluator, keeping everything as ArrayRefs.
+    ///
+    /// The output is taken from the concatenated batch of the old state and the new input.
+    /// Whenever the input holds a valid value for an entity, the take index for that
+    /// entity is updated to the length of the old state plus the current index (i.e. the
+    /// index into the new input). Otherwise, the take index keeps pointing at the old
+    /// state's value.
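// NOTE (annotation, not part of the diff): unlike `first`, `last` repoints an
// entity's take index at every valid input row. For example, with an old state
// of length 2 and valid inputs for entity 0 at rows 0 and 1, `last` ends at
// index 2 + 1 (the later row), where `first` keeps the earliest, index 2 + 0.
// (end annotation)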
+ fn aggregate( + token: &mut ListAccumToken, + key_capacity: usize, + key_indices: &UInt32Array, + input: &ArrayRef, + ) -> anyhow::Result { + Self::ensure_entity_capacity(token, key_capacity)?; + let list_input = as_list_array(input); + + let mut take_new_state: Vec = (0..token.accum.len() as u32).collect(); + let mut take_output_builder = UInt32Array::builder(input.len()); + + for input_index in 0..list_input.len() { + let entity_index = key_indices.value(input_index); + if list_input.is_valid(input_index) { + take_new_state[entity_index as usize] = (input_index + take_new_state.len()) as u32; + } + take_output_builder.append_value(take_new_state[entity_index as usize]) + } + + // Gather the output, using the previous state and the new input + let output = + sparrow_arrow::concat_take(&token.accum, input, &take_output_builder.finish())?; + + // Update the state token with the new state + let take_new_state = PrimitiveArray::from_iter_values(take_new_state); + let new_state = sparrow_arrow::concat_take(&token.accum, input, &take_new_state)?; + token.set_state(new_state); + + Ok(output) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use arrow::array::{AsArray, Int64Builder, ListBuilder}; + use arrow_schema::{DataType, Field}; + use std::sync::Arc; + + fn default_token() -> ListAccumToken { + let f = Arc::new(Field::new("item", DataType::Int64, true)); + let list = DataType::List(f); + let accum = new_empty_array(&list); + ListAccumToken { accum } + } + + #[test] + fn test_last_list_multiple_batches() { + let mut token = default_token(); + let key_indices = UInt32Array::from(vec![0, 0, 0, 0, 0, 0]); + let key_capacity = 1; + + // Batch 1 + let mut builder = ListBuilder::new(Int64Builder::new()); + builder.append_value([Some(1), Some(2), Some(3)]); + builder.append_value([Some(4), None, Some(5)]); + builder.append_value([None, None]); + builder.append(false); + builder.append_value([]); + builder.append_value([Some(7), Some(8), Some(9)]); + + let array = builder.finish(); + + let input: ArrayRef = Arc::new(array); + let result = + LastListEvaluator::aggregate(&mut token, key_capacity, &key_indices, &input).unwrap(); + let result = result.as_list(); + + let mut builder = ListBuilder::new(Int64Builder::new()); + builder.append_value([Some(1), Some(2), Some(3)]); + builder.append_value([Some(4), None, Some(5)]); + builder.append_value([None, None]); + builder.append_value([None, None]); + builder.append_value([]); + builder.append_value([Some(7), Some(8), Some(9)]); + let expected = builder.finish(); + + assert_eq!(&expected, result); + + // Batch 2 + let mut builder = ListBuilder::new(Int64Builder::new()); + builder.append_value([Some(10), Some(11)]); + builder.append(true); + builder.append_value([Some(13), None]); + builder.append(false); + builder.append(false); + builder.append_value([Some(14)]); + + let array = builder.finish(); + let input: ArrayRef = Arc::new(array); + + // Introduce more entities + let key_indices = UInt32Array::from(vec![0, 1, 2, 1, 0, 1]); + let key_capacity = 3; + let result = + LastListEvaluator::aggregate(&mut token, key_capacity, &key_indices, &input).unwrap(); + let result = result.as_list(); + + let mut builder = ListBuilder::new(Int64Builder::new()); + builder.append_value([Some(10), Some(11)]); + builder.append(true); + builder.append_value([Some(13), None]); + builder.append(true); + builder.append_value([Some(10), Some(11)]); + builder.append_value([Some(14)]); + let expected = builder.finish(); + + assert_eq!(&expected, result); + } +} diff --git 
a/crates/sparrow-instructions/src/evaluators/aggregation/map/first_map_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/map/first_map_evaluator.rs index ce10207b0..fbbb12f30 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/map/first_map_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/map/first_map_evaluator.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use crate::ValueRef; use crate::{ AggregationArgs, Evaluator, EvaluatorFactory, MapAccumToken, RuntimeInfo, StateToken, StaticInfo, @@ -7,7 +8,6 @@ use crate::{ use arrow::array::{ as_map_array, new_empty_array, Array, ArrayRef, AsArray, PrimitiveArray, UInt32Array, }; -use sparrow_plan::ValueRef; /// Evaluator for the `First` instruction on maps pub struct FirstMapEvaluator { diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/map/last_map_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/map/last_map_evaluator.rs index 77d7187b8..f2e8882e7 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/map/last_map_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/map/last_map_evaluator.rs @@ -4,7 +4,7 @@ use arrow::array::{ as_map_array, new_empty_array, Array, ArrayRef, AsArray, PrimitiveArray, UInt32Array, }; -use sparrow_plan::ValueRef; +use crate::ValueRef; use crate::{ AggregationArgs, Evaluator, EvaluatorFactory, MapAccumToken, RuntimeInfo, StateToken, diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/primitive/arrow_agg_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/primitive/arrow_agg_evaluator.rs index 5c4a7ec43..4779a7428 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/primitive/arrow_agg_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/primitive/arrow_agg_evaluator.rs @@ -1,12 +1,12 @@ use std::sync::Arc; +use crate::ValueRef; use arrow::array::{Array, ArrayRef, BooleanArray, PrimitiveArray, UInt32Array}; use arrow::datatypes::ArrowNativeType; use itertools::izip; use serde::de::DeserializeOwned; use serde::Serialize; use sparrow_arrow::downcast::downcast_primitive_array; -use sparrow_plan::ValueRef; use super::two_stacks_arrow_agg_evaluator::TwoStacksArrowAggEvaluator; use crate::evaluators::aggregation::function::agg_fn::ArrowAggFn; diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/primitive/two_stacks_arrow_agg_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/primitive/two_stacks_arrow_agg_evaluator.rs index 9f284476a..ab600a80c 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/primitive/two_stacks_arrow_agg_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/primitive/two_stacks_arrow_agg_evaluator.rs @@ -1,5 +1,6 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::anyhow; use arrow::array::{Array, ArrayRef, BooleanArray, PrimitiveArray, UInt32Array}; use arrow::datatypes::{ArrowNativeType, Int64Type}; @@ -7,7 +8,6 @@ use itertools::izip; use serde::de::DeserializeOwned; use serde::Serialize; use sparrow_arrow::downcast::downcast_primitive_array; -use sparrow_plan::ValueRef; use crate::{ AggregationArgs, ArrowAggFn, Evaluator, RuntimeInfo, StateToken, TwoStacks, diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/string/first_string_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/string/first_string_evaluator.rs index 5ba02dbed..376475308 100644 --- 
a/crates/sparrow-instructions/src/evaluators/aggregation/string/first_string_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/string/first_string_evaluator.rs @@ -1,9 +1,9 @@ use std::sync::Arc; +use crate::ValueRef; use arrow::array::{Array, ArrayRef, BooleanArray, StringArray, UInt32Array}; use itertools::izip; use sparrow_arrow::downcast::downcast_string_array; -use sparrow_plan::ValueRef; use super::two_stacks_first_string_evaluator::TwoStacksFirstStringEvaluator; use crate::{ diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/string/last_string_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/string/last_string_evaluator.rs index c47e32761..189a108e9 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/string/last_string_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/string/last_string_evaluator.rs @@ -1,9 +1,9 @@ use std::sync::Arc; +use crate::ValueRef; use arrow::array::{Array, ArrayRef, BooleanArray, StringArray, UInt32Array}; use itertools::izip; use sparrow_arrow::downcast::downcast_string_array; -use sparrow_plan::ValueRef; use super::two_stacks_last_string_evaluator::TwoStacksLastStringEvaluator; use crate::{ diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/string/two_stacks_first_string_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/string/two_stacks_first_string_evaluator.rs index bd5d9f0f8..ab44b175d 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/string/two_stacks_first_string_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/string/two_stacks_first_string_evaluator.rs @@ -1,11 +1,11 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::anyhow; use arrow::array::{Array, ArrayRef, BooleanArray, StringArray, UInt32Array}; use arrow::datatypes::Int64Type; use itertools::izip; use sparrow_arrow::downcast::downcast_string_array; -use sparrow_plan::ValueRef; use crate::{ AggregationArgs, Evaluator, FirstString, RuntimeInfo, StateToken, TwoStacks, diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/string/two_stacks_last_string_evaluator.rs b/crates/sparrow-instructions/src/evaluators/aggregation/string/two_stacks_last_string_evaluator.rs index e3cccf04c..bfb0eb6e9 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/string/two_stacks_last_string_evaluator.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/string/two_stacks_last_string_evaluator.rs @@ -1,11 +1,11 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::anyhow; use arrow::array::{Array, ArrayRef, BooleanArray, StringArray, UInt32Array}; use arrow::datatypes::Int64Type; use itertools::izip; use sparrow_arrow::downcast::downcast_string_array; -use sparrow_plan::ValueRef; use crate::{ AggregationArgs, Evaluator, LastString, RuntimeInfo, StateToken, TwoStacks, diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/token.rs b/crates/sparrow-instructions/src/evaluators/aggregation/token.rs index 766028640..1489b5781 100644 --- a/crates/sparrow-instructions/src/evaluators/aggregation/token.rs +++ b/crates/sparrow-instructions/src/evaluators/aggregation/token.rs @@ -1,8 +1,10 @@ //! Tokens representing keys for compute storage. 
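// NOTE (annotation, not part of the diff): every token added below follows the
// same persistence contract, using the `restore`/`store` signatures visible in
// this patch. A sketch with a hypothetical token holding plain bytes:
//
//     struct MyToken {
//         state: Vec<u8>,
//     }
//
//     impl StateToken for MyToken {
//         fn restore(&mut self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> {
//             if let Some(state) = store.get(key)? {
//                 self.state = state; // resume from persisted state
//             } else {
//                 self.state.clear(); // no saved state: start fresh
//             }
//             Ok(())
//         }
//
//         fn store(&self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> {
//             store.put(key, &self.state)
//         }
//     }
// (end annotation)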
mod boolean_accum_token; +mod collect_struct_token; +mod collect_token; mod count_accum_token; -pub mod lag_token; +mod list_accum_token; mod map_accum_token; mod primitive_accum_token; mod string_accum_token; @@ -12,7 +14,10 @@ mod two_stacks_primitive_accum_token; mod two_stacks_string_accum_token; pub use boolean_accum_token::*; +pub use collect_struct_token::*; +pub use collect_token::*; pub use count_accum_token::*; +pub use list_accum_token::*; pub use map_accum_token::*; pub use primitive_accum_token::*; pub use string_accum_token::*; diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/token/collect_struct_token.rs b/crates/sparrow-instructions/src/evaluators/aggregation/token/collect_struct_token.rs new file mode 100644 index 000000000..281c29b5f --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/aggregation/token/collect_struct_token.rs @@ -0,0 +1,86 @@ +use std::sync::Arc; + +use arrow::array::{new_empty_array, new_null_array, Array, ArrayRef, AsArray}; +use arrow_schema::{DataType, Field, TimeUnit}; + +use crate::{ComputeStore, StateToken, StoreKey}; + +/// Token used for collecting structs +#[derive(Debug, serde::Serialize, serde::Deserialize)] +pub struct CollectStructToken { + /// Stores the state for in-memory usage. + /// + /// A [ListArray] comprised of lists of structs for each entity. + #[serde(with = "sparrow_arrow::serde::array_ref")] + pub state: ArrayRef, + /// Stores the times of the state values. + /// + /// A [ListArray] comprised of lists of timestamps for each entity. + /// + /// This array is only used when we have a `trailing` window. + /// Likely this should be separated into a different implementation. + #[serde(with = "sparrow_arrow::serde::array_ref")] + pub times: ArrayRef, +} + +impl StateToken for CollectStructToken { + fn restore(&mut self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> { + if let Some(state) = store.get(key)? 
{
+            let state: CollectStructToken = state;
+            self.state = state.state;
+        };
+
+        // TODO: restore times
+        panic!("time restoration not implemented")
+    }
+
+    fn store(&self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> {
+        store.put(key, &self)
+    }
+}
+
+impl CollectStructToken {
+    pub fn new(state: ArrayRef) -> Self {
+        let field_ref = Arc::new(Field::new(
+            "item",
+            DataType::Timestamp(TimeUnit::Nanosecond, None),
+            true,
+        ));
+        let times_type = DataType::List(field_ref);
+        Self {
+            state,
+            times: new_empty_array(&times_type),
+        }
+    }
+
+    pub fn new_with_time(state: ArrayRef, times: ArrayRef) -> Self {
+        Self { state, times }
+    }
+
+    pub fn resize(&mut self, len: usize) -> anyhow::Result<()> {
+        let diff = len - self.state.len();
+
+        // Resize the state
+        let null_array = new_null_array(self.state.data_type(), diff);
+        let null_array = null_array.as_ref().as_list::<i32>();
+        let new_state = arrow::compute::concat(&[&self.state, null_array])?;
+        self.state = new_state.clone();
+
+        // Resize the times
+        let null_array = new_null_array(self.times.data_type(), diff);
+        let null_array = null_array.as_ref().as_list::<i32>();
+        let new_times = arrow::compute::concat(&[&self.times, null_array])?;
+        self.times = new_times.clone();
+
+        Ok(())
+    }
+
+    pub fn set_state_and_time(&mut self, new_state: ArrayRef, new_times: ArrayRef) {
+        self.state = new_state;
+        self.times = new_times;
+    }
+
+    pub fn set_state(&mut self, new_state: ArrayRef) {
+        self.state = new_state;
+    }
+}
diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/token/collect_token.rs b/crates/sparrow-instructions/src/evaluators/aggregation/token/collect_token.rs
new file mode 100644
index 000000000..9013ea440
--- /dev/null
+++ b/crates/sparrow-instructions/src/evaluators/aggregation/token/collect_token.rs
@@ -0,0 +1,112 @@
+use serde::de::DeserializeOwned;
+use serde::Serialize;
+use std::collections::VecDeque;
+
+use crate::{ComputeStore, StateToken, StoreKey};
+
+/// State token used for the `collect` instruction.
+#[derive(Default, Debug)]
+pub struct CollectToken<T>
+where
+    T: Clone,
+    T: Serialize + DeserializeOwned,
+    Vec<VecDeque<T>>: Serialize + DeserializeOwned,
+{
+    state: Vec<VecDeque<T>>,
+    /// Stores the times of the state values.
+    ///
+    /// Comprised of lists of timestamps for each entity.
+    ///
+    /// This array is only used when we have a `trailing` window.
+    /// Likely this should be separated into a different implementation.
+    times: Vec<VecDeque<i64>>,
+}
+
+impl<T> CollectToken<T>
+where
+    T: Clone,
+    T: Serialize + DeserializeOwned,
+    Vec<VecDeque<T>>: Serialize + DeserializeOwned,
+{
+    pub fn resize(&mut self, len: usize) {
+        if len >= self.state.len() {
+            self.state.resize(len + 1, VecDeque::new());
+            self.times.resize(len + 1, VecDeque::new());
+        }
+    }
+
+    pub fn add_value(&mut self, max: usize, index: usize, input: T) {
+        self.state[index].push_back(input);
+        if self.state[index].len() > max {
+            self.state[index].pop_front();
+        }
+    }
+
+    /// Adds the input and time, then removes any values that are outside of the window.
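// NOTE (annotation, not part of the diff): minimal sketch of the trailing-
// window bookkeeping implemented below -- values and their timestamps are
// buffered per entity, and anything at or older than `time - window_duration`
// is evicted (hypothetical, simplified to a single entity):
//
//     use std::collections::VecDeque;
//
//     fn evict(values: &mut VecDeque<i64>, times: &mut VecDeque<i64>, time: i64, window: i64) {
//         let min_time = time - window;
//         while times.front().map_or(false, |t| *t <= min_time) {
//             times.pop_front();
//             values.pop_front();
//         }
//     }
// (end annotation)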
+    pub fn add_value_with_time(
+        &mut self,
+        max: usize,
+        index: usize,
+        input: T,
+        time: i64,
+        window_duration: i64,
+    ) {
+        self.state[index].push_back(input);
+        self.times[index].push_back(time);
+        if self.times[index].len() > max {
+            self.state[index].pop_front();
+            self.times[index].pop_front();
+        }
+        debug_assert_eq!(self.times[index].len(), self.state[index].len());
+
+        self.check_time(index, time, window_duration)
+    }
+
+    /// Pops all values and times that are outside of the window.
+    pub fn check_time(&mut self, index: usize, time: i64, window_duration: i64) {
+        debug_assert_eq!(self.times[index].len(), self.state[index].len());
+        let min_time = time - window_duration;
+
+        if let Some(mut front) = self.times[index].front() {
+            while *front <= min_time {
+                self.state[index].pop_front();
+                self.times[index].pop_front();
+
+                if let Some(f) = self.times[index].front() {
+                    front = f
+                } else {
+                    break;
+                }
+            }
+        }
+    }
+
+    pub fn state(&self, index: usize) -> &VecDeque<T> {
+        &self.state[index]
+    }
+
+    pub fn reset(&mut self, index: usize) {
+        self.state[index].clear();
+    }
+}
+
+impl<T> StateToken for CollectToken<T>
+where
+    T: Clone,
+    T: Serialize + DeserializeOwned,
+    Vec<VecDeque<T>>: Serialize + DeserializeOwned,
+{
+    fn restore(&mut self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> {
+        if let Some(state) = store.get(key)? {
+            self.state = state;
+        } else {
+            self.state.clear();
+        }
+        // TODO: restore times
+        panic!("time restoration not implemented")
+    }
+
+    fn store(&self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> {
+        store.put(key, &self.state)
+    }
+}
diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/token/lag_token.rs b/crates/sparrow-instructions/src/evaluators/aggregation/token/lag_token.rs
deleted file mode 100644
index d8767b983..000000000
--- a/crates/sparrow-instructions/src/evaluators/aggregation/token/lag_token.rs
+++ /dev/null
@@ -1,27 +0,0 @@
-use arrow::datatypes::ArrowPrimitiveType;
-use serde::de::DeserializeOwned;
-use serde::Serialize;
-use sparrow_kernels::lag::LagPrimitive;
-
-use crate::{ComputeStore, StateToken, StoreKey};
-
-/// State token used for the lag operator.
-impl<T> StateToken for LagPrimitive<T>
-where
-    T: ArrowPrimitiveType,
-    T::Native: Serialize + DeserializeOwned + Copy,
-{
-    fn restore(&mut self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> {
-        if let Some(state) = store.get(key)? {
-            self.state = state;
-        } else {
-            self.state = vec![];
-        }
-
-        Ok(())
-    }
-
-    fn store(&self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> {
-        store.put(key, &self.state)
-    }
-}
diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/token/list_accum_token.rs b/crates/sparrow-instructions/src/evaluators/aggregation/token/list_accum_token.rs
new file mode 100644
index 000000000..e955d73c9
--- /dev/null
+++ b/crates/sparrow-instructions/src/evaluators/aggregation/token/list_accum_token.rs
@@ -0,0 +1,57 @@
+use arrow::array::{new_null_array, Array, ArrayRef, AsArray, ListArray};
+
+use crate::{ComputeStore, StateToken, StoreKey};
+
+/// Token used for list accumulators.
+///
+/// List accumulators are serialized as [ArrayRef], working directly with
+/// Arrow.
+#[derive(serde::Serialize, serde::Deserialize)]
+pub struct ListAccumToken {
+    /// Stores the state for in-memory usage.
+    #[serde(with = "sparrow_arrow::serde::array_ref")]
+    pub accum: ArrayRef,
+}
+
+impl StateToken for ListAccumToken {
+    fn restore(&mut self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> {
+        if let Some(state) = store.get(key)? {
+            let state: ListAccumToken = state;
+            self.accum = state.accum;
+        };
+        Ok(())
+    }
+
+    fn store(&self, key: &StoreKey, store: &ComputeStore) -> anyhow::Result<()> {
+        store.put(key, &self)
+    }
+}
+
+impl ListAccumToken {
+    pub fn new(accum: ArrayRef) -> Self {
+        Self { accum }
+    }
+
+    pub fn array(&self) -> &ListArray {
+        self.accum.as_list()
+    }
+
+    /// Concat nulls to the end of the current accumulator to grow the size.
+    pub fn resize(&mut self, len: usize) -> anyhow::Result<()> {
+        let diff = len - self.accum.len();
+
+        let null_array = new_null_array(self.accum.data_type(), diff);
+        let null_array = null_array.as_ref().as_list::<i32>();
+        let new_state = arrow::compute::concat(&[&self.accum, null_array])?;
+        self.accum = new_state.clone();
+        Ok(())
+    }
+
+    pub fn value_is_null(&mut self, key: u32) -> bool {
+        self.accum.is_null(key as usize)
+    }
+
+    pub fn set_state(&mut self, new_state: ArrayRef) {
+        self.accum = new_state
+    }
+}
diff --git a/crates/sparrow-instructions/src/evaluators/aggregation/token/map_accum_token.rs b/crates/sparrow-instructions/src/evaluators/aggregation/token/map_accum_token.rs
index 7959e8024..96e9fcb23 100644
--- a/crates/sparrow-instructions/src/evaluators/aggregation/token/map_accum_token.rs
+++ b/crates/sparrow-instructions/src/evaluators/aggregation/token/map_accum_token.rs
@@ -1,4 +1,4 @@
-use arrow::array::{new_null_array, Array, ArrayRef, AsArray, MapArray};
+use arrow::array::{new_null_array, Array, ArrayRef, AsArray};
 
 use crate::{ComputeStore, StateToken, StoreKey};
 
@@ -32,10 +32,6 @@ impl MapAccumToken {
         Self { accum }
     }
 
-    pub fn array(&self) -> &MapArray {
-        self.accum.as_map()
-    }
-
     /// Concat nulls to the end of the current accumulator to grow the size.
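// NOTE (annotation, not part of the diff): both the list and map tokens grow
// per-entity state by concatenating a null array, so newly seen entities start
// out null. Standalone sketch with hypothetical data:
//
//     use arrow::array::{new_null_array, Array, Int64Array};
//     use arrow_schema::DataType;
//
//     let state = Int64Array::from(vec![Some(1)]); // one known entity
//     let nulls = new_null_array(&DataType::Int64, 2); // two new entities
//     let grown = arrow::compute::concat(&[&state, nulls.as_ref()]).unwrap();
//     assert_eq!(grown.len(), 3);
//     assert!(grown.is_null(1) && grown.is_null(2));
// (end annotation)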
pub fn resize(&mut self, len: usize) -> anyhow::Result<()> {
         let diff = len - self.accum.len();
diff --git a/crates/sparrow-instructions/src/evaluators/cast.rs b/crates/sparrow-instructions/src/evaluators/cast.rs
index bbb31dfa7..6f398bd44 100644
--- a/crates/sparrow-instructions/src/evaluators/cast.rs
+++ b/crates/sparrow-instructions/src/evaluators/cast.rs
@@ -4,12 +4,12 @@
 use std::sync::Arc;
 
+use crate::ValueRef;
 use anyhow::anyhow;
 use arrow::array::{ArrayRef, Int32Array, IntervalDayTimeArray, IntervalYearMonthArray};
 use arrow::datatypes::DataType;
 use sparrow_arrow::downcast::downcast_primitive_array;
 use sparrow_kernels::time::i64_to_two_i32;
-use sparrow_plan::ValueRef;
 use sparrow_syntax::FenlType;
 
 use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo};
diff --git a/crates/sparrow-instructions/src/evaluators/comparison.rs b/crates/sparrow-instructions/src/evaluators/comparison.rs
index 82a0bda0b..6434c7190 100644
--- a/crates/sparrow-instructions/src/evaluators/comparison.rs
+++ b/crates/sparrow-instructions/src/evaluators/comparison.rs
@@ -1,11 +1,11 @@
 use std::marker::PhantomData;
 use std::sync::Arc;
 
+use crate::ValueRef;
 use anyhow::anyhow;
 use arrow::array::ArrayRef;
 use arrow::datatypes::{ArrowNativeTypeOp, ArrowNumericType};
 use sparrow_arrow::scalar_value::NativeFromScalar;
-use sparrow_plan::ValueRef;
 
 use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo};
diff --git a/crates/sparrow-instructions/src/evaluators/equality.rs b/crates/sparrow-instructions/src/evaluators/equality.rs
index 011a53bf9..f064972d2 100644
--- a/crates/sparrow-instructions/src/evaluators/equality.rs
+++ b/crates/sparrow-instructions/src/evaluators/equality.rs
@@ -1,11 +1,11 @@
 use std::marker::PhantomData;
 use std::sync::Arc;
 
+use crate::ValueRef;
 use arrow::array::Array;
 use arrow::array::ArrayRef;
 use arrow::datatypes::{ArrowNativeTypeOp, ArrowNumericType, DataType};
 use sparrow_arrow::scalar_value::NativeFromScalar;
-use sparrow_plan::ValueRef;
 
 use crate::evaluators::macros::create_typed_evaluator;
 use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo};
@@ -18,7 +18,9 @@ impl EvaluatorFactory for EqEvaluatorFactory {
         create_typed_evaluator!(
             &info.args[0].data_type,
             NumericEqEvaluator,
-            MapEqEvaluator,
+            DynEqEvaluator,
+            DynEqEvaluator,
+            DynEqEvaluator,
             BoolEqEvaluator,
             StringEqEvaluator,
             info
@@ -31,7 +33,9 @@ impl EvaluatorFactory for NeqEvaluatorFactory {
         create_typed_evaluator!(
             &info.args[0].data_type,
             NumericNeqEvaluator,
-            MapNeqEvaluator,
+            DynNeqEvaluator,
+            DynNeqEvaluator,
+            DynNeqEvaluator,
             BoolNeqEvaluator,
             StringNeqEvaluator,
             info
@@ -39,44 +43,44 @@ impl EvaluatorFactory for NeqEvaluatorFactory {
     }
 }
 
-/// Evaluator for `eq` on maps.
-struct MapEqEvaluator {
+/// Evaluator for `eq` using `eq_dyn`
+struct DynEqEvaluator {
     lhs: ValueRef,
     rhs: ValueRef,
 }
 
-impl Evaluator for MapEqEvaluator {
+impl Evaluator for DynEqEvaluator {
     fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
-        let lhs = info.value(&self.lhs)?.map_array()?;
-        let rhs = info.value(&self.rhs)?.map_array()?;
+        let lhs = info.value(&self.lhs)?.array_ref()?;
+        let rhs = info.value(&self.rhs)?.array_ref()?;
         let result = arrow::compute::eq_dyn(lhs.as_ref(), rhs.as_ref())?;
         Ok(Arc::new(result))
     }
 }
 
-impl EvaluatorFactory for MapEqEvaluator {
+impl EvaluatorFactory for DynEqEvaluator {
     fn try_new(info: StaticInfo<'_>) -> anyhow::Result<Box<dyn Evaluator>> {
         let (lhs, rhs) = info.unpack_arguments()?;
         Ok(Box::new(Self { lhs, rhs }))
     }
 }
 
-/// Evaluator for `neq` on maps.
-struct MapNeqEvaluator { +/// Evaluator for `neq` using `neq_dyn` +struct DynNeqEvaluator { lhs: ValueRef, rhs: ValueRef, } -impl Evaluator for MapNeqEvaluator { +impl Evaluator for DynNeqEvaluator { fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result { - let lhs = info.value(&self.lhs)?.map_array()?; - let rhs = info.value(&self.rhs)?.map_array()?; + let lhs = info.value(&self.lhs)?.array_ref()?; + let rhs = info.value(&self.rhs)?.array_ref()?; let result = arrow::compute::neq_dyn(lhs.as_ref(), rhs.as_ref())?; Ok(Arc::new(result)) } } -impl EvaluatorFactory for MapNeqEvaluator { +impl EvaluatorFactory for DynNeqEvaluator { fn try_new(info: StaticInfo<'_>) -> anyhow::Result> { let (lhs, rhs) = info.unpack_arguments()?; Ok(Box::new(Self { lhs, rhs })) diff --git a/crates/sparrow-instructions/src/evaluators/field_ref.rs b/crates/sparrow-instructions/src/evaluators/field_ref.rs index 3e59e3dfa..7375b5bc0 100644 --- a/crates/sparrow-instructions/src/evaluators/field_ref.rs +++ b/crates/sparrow-instructions/src/evaluators/field_ref.rs @@ -1,11 +1,15 @@ //! The field ref instruction isn't a "normal" instruction since the result //! type depends on the type of input. -use anyhow::{anyhow, Context}; -use arrow::array::ArrayRef; +use std::sync::Arc; + +use crate::ValueRef; +use anyhow::anyhow; +use arrow::array::{make_array, Array, ArrayRef, AsArray, ListArray}; +use arrow::buffer::NullBuffer; use arrow::datatypes::DataType; +use arrow_schema::{Field, FieldRef}; use sparrow_arrow::scalar_value::ScalarValue; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, StaticInfo}; @@ -13,6 +17,36 @@ use crate::{Evaluator, EvaluatorFactory, StaticInfo}; pub struct FieldRefEvaluator { base: ValueRef, field_index: usize, + field: FieldRef, +} + +fn get_field_index(field_name: &str, base: &DataType) -> anyhow::Result<(usize, FieldRef)> { + let (base_fields, name) = if let DataType::Struct(fields) = base { + (fields, None) + } else if let DataType::List(field) = base { + if let DataType::Struct(fields) = field.data_type() { + (fields, Some("item")) + } else { + anyhow::bail!("Field-ref only works on lists of records, but was {base:?}"); + } + } else { + anyhow::bail!( + "Unable to create FieldRefEvaluator for input type {:?}", + base + ); + }; + + let (index, field) = base_fields + .find(field_name) + .ok_or_else(|| anyhow!("No field named '{}' in struct {:?}", field_name, base))?; + + // We can't re-use the field if it is part of a collection, which needs special names. 
+ let field = if let Some(name) = name { + Arc::new(Field::new(name, field.data_type().clone(), true)) + } else { + field.clone() + }; + Ok((index, field)) } impl EvaluatorFactory for FieldRefEvaluator { @@ -30,38 +64,51 @@ impl EvaluatorFactory for FieldRefEvaluator { ); }; - let base_fields = if let DataType::Struct(fields) = input_type { - fields - } else { - anyhow::bail!( - "Unable to create FieldRefEvaluator for input type {:?}", - &info.args[0].data_type - ); - }; - - let field_index = base_fields - .iter() - .position(|field| field.name() == field_name) - .ok_or_else(|| anyhow!("No field named '{}' in struct {:?}", field_name, input_type))?; - + let (field_index, field) = get_field_index(field_name, input_type)?; let (base, _) = info.unpack_arguments()?; - Ok(Box::new(Self { base, field_index })) + Ok(Box::new(Self { + base, + field_index, + field, + })) } } impl Evaluator for FieldRefEvaluator { fn evaluate(&mut self, info: &dyn crate::RuntimeInfo) -> anyhow::Result { - let input = info.value(&self.base)?.struct_array()?; - - let field_array = input.column(self.field_index); - - // Arrow field ref ignores the null-ness of the outer struct. - // To handle this, we null the field out if the struct was null. - // TODO: Express in the DFG so we can avoid computing `is_not_null` repeatedly? - let is_struct_null = arrow::compute::kernels::boolean::is_null(input.as_ref())?; - // We need to null out rows that are null in the struct, since - // they may not be properly nulled-out in the field column. - arrow::compute::nullif(field_array.as_ref(), &is_struct_null) - .context("null_if for field ref") + let input = info.value(&self.base)?.array_ref()?; + + match input.data_type() { + DataType::Struct(_) => { + let input = input.as_struct(); + let field = input.column(self.field_index); + + // Arrow field ref ignores the null-ness of the outer struct. + // To handle this, we null the field out if the struct was null. + let nulls = NullBuffer::union(input.nulls(), field.nulls()); + + let data = field.to_data().into_builder().nulls(nulls).build()?; + Ok(make_array(data)) + } + DataType::List(_) => { + let list = input.as_list(); + let structs = list.values().as_struct(); + let fields = structs.column(self.field_index); + + // Arrow field ref ignores the null-ness of the outer struct. + // To handle this, we null the field out if the struct was null. 
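// NOTE (annotation, not part of the diff): `NullBuffer::union` below computes
// combined validity -- a slot is valid only when both the struct and the field
// are valid -- which is exactly the "null out fields of a null struct" rule.
// Sketch with hypothetical buffers:
//
//     use arrow::buffer::NullBuffer;
//
//     let outer = NullBuffer::from(vec![true, false, true]); // struct validity
//     let inner = NullBuffer::from(vec![true, true, false]); // field validity
//     let merged = NullBuffer::union(Some(&outer), Some(&inner)).unwrap();
//     assert_eq!(merged.iter().collect::<Vec<_>>(), vec![true, false, false]);
// (end annotation)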
+ let nulls = NullBuffer::union(structs.nulls(), fields.nulls()); + + let values = make_array(fields.to_data().into_builder().nulls(nulls).build()?); + let list = ListArray::new( + self.field.clone(), + list.offsets().clone(), + values, + list.nulls().cloned(), + ); + Ok(Arc::new(list)) + } + unsupported => anyhow::bail!("Unsupported input for field ref: {unsupported:?}"), + } } } diff --git a/crates/sparrow-instructions/src/evaluators/general.rs b/crates/sparrow-instructions/src/evaluators/general.rs index 4bf8235ba..3ab702adb 100644 --- a/crates/sparrow-instructions/src/evaluators/general.rs +++ b/crates/sparrow-instructions/src/evaluators/general.rs @@ -1,8 +1,8 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::Context; use arrow::array::ArrayRef; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; @@ -34,7 +34,7 @@ pub(super) struct HashEvaluator { impl Evaluator for HashEvaluator { fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result { let input = info.value(&self.input)?.array_ref()?; - let result = sparrow_arrow::hash::hash(input.as_ref())?; + let result = sparrow_arrow::hash::hash(input.as_ref()).map_err(|e| e.into_error())?; Ok(Arc::new(result)) } } diff --git a/crates/sparrow-instructions/src/evaluators/json_field.rs b/crates/sparrow-instructions/src/evaluators/json_field.rs index 931830b50..c0bf5cfac 100644 --- a/crates/sparrow-instructions/src/evaluators/json_field.rs +++ b/crates/sparrow-instructions/src/evaluators/json_field.rs @@ -1,10 +1,10 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::Context; use arrow::array::{Array, ArrayRef, StringArray}; use owning_ref::ArcRef; use sparrow_arrow::scalar_value::ScalarValue; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; @@ -103,11 +103,11 @@ impl EvaluatorFactory for JsonFieldEvaluator { mod tests { use std::sync::Arc; + use crate::{InstKind, InstOp}; use arrow::array::{GenericStringArray, StringArray}; use arrow::datatypes::DataType; use owning_ref::OwningRef; use sparrow_arrow::downcast::downcast_string_array; - use sparrow_plan::{InstKind, InstOp}; use super::*; use crate::StaticArg; diff --git a/crates/sparrow-instructions/src/evaluators/list.rs b/crates/sparrow-instructions/src/evaluators/list.rs new file mode 100644 index 000000000..ece0f58b2 --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/list.rs @@ -0,0 +1,17 @@ +mod collect_boolean; +mod collect_primitive; +mod collect_string; +mod collect_struct; +mod flatten; +mod index; +mod list_len; +mod union; + +pub(super) use collect_boolean::*; +pub(super) use collect_primitive::*; +pub(super) use collect_string::*; +pub(super) use collect_struct::*; +pub(super) use flatten::*; +pub(super) use index::*; +pub(super) use list_len::*; +pub(super) use union::*; diff --git a/crates/sparrow-instructions/src/evaluators/list/collect_boolean.rs b/crates/sparrow-instructions/src/evaluators/list/collect_boolean.rs new file mode 100644 index 000000000..2ded0469a --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/list/collect_boolean.rs @@ -0,0 +1,235 @@ +use crate::ValueRef; +use crate::{CollectToken, Evaluator, EvaluatorFactory, RuntimeInfo, StateToken, StaticInfo}; +use arrow::array::{ArrayRef, AsArray, BooleanBuilder, ListBuilder, TimestampNanosecondArray}; +use arrow::datatypes::{DataType, Int64Type}; +use itertools::izip; +use sparrow_arrow::scalar_value::ScalarValue; +use std::sync::Arc; + +/// Evaluator for the `collect` instruction. 
+/// +/// Collects a stream of values into a List. A list is produced +/// for each input value received, growing up to a maximum size. +/// +/// If the list is empty, an empty list is returned (rather than `null`). +#[derive(Debug)] +pub struct CollectBooleanEvaluator { + /// The min size of the buffer. + /// + /// If the buffer is smaller than this, a null value + /// will be produced. + min: usize, + /// The max size of the buffer. + /// + /// Once the max size is reached, the front will be popped and the new + /// value pushed to the back. + max: usize, + input: ValueRef, + tick: ValueRef, + duration: ValueRef, + /// Contains the buffer of values for each entity + token: CollectToken, +} + +impl EvaluatorFactory for CollectBooleanEvaluator { + fn try_new(info: StaticInfo<'_>) -> anyhow::Result> { + let input_type = info.args[0].data_type(); + let result_type = info.result_type; + match result_type { + DataType::List(t) => anyhow::ensure!(t.data_type() == input_type), + other => anyhow::bail!("expected list result type, saw {:?}", other), + }; + + let max = match info.args[1].value_ref.literal_value() { + Some(ScalarValue::Int64(Some(v))) if *v <= 0 => { + anyhow::bail!("unexpected value of `max` -- must be > 0") + } + Some(ScalarValue::Int64(Some(v))) => *v as usize, + // If a user specifies `max = null`, we use usize::MAX value as a way + // to have an "unlimited" buffer. + Some(ScalarValue::Int64(None)) => usize::MAX, + Some(other) => anyhow::bail!("expected i64 for max parameter, saw {:?}", other), + None => anyhow::bail!("expected literal value for max parameter"), + }; + + let min = match info.args[2].value_ref.literal_value() { + Some(ScalarValue::Int64(Some(v))) if *v < 0 => { + anyhow::bail!("unexpected value of `min` -- must be >= 0") + } + Some(ScalarValue::Int64(Some(v))) => *v as usize, + // If a user specifies `min = null`, default to 0. 
+ Some(ScalarValue::Int64(None)) => 0, + Some(other) => anyhow::bail!("expected i64 for min parameter, saw {:?}", other), + None => anyhow::bail!("expected literal value for min parameter"), + }; + assert!(min < max, "min must be less than max"); + + let (input, _, _, tick, duration) = info.unpack_arguments()?; + Ok(Box::new(Self { + min, + max, + input, + tick, + duration, + token: CollectToken::default(), + })) + } +} + +impl Evaluator for CollectBooleanEvaluator { + fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result { + match (self.tick.is_literal_null(), self.duration.is_literal_null()) { + (true, true) => self.evaluate_non_windowed(info), + (false, true) => self.evaluate_since_windowed(info), + (true, false) => self.evaluate_trailing_windowed(info), + (false, false) => panic!("sliding window aggregation should use other evaluator"), + } + } + + fn state_token(&self) -> Option<&dyn StateToken> { + Some(&self.token) + } + + fn state_token_mut(&mut self) -> Option<&mut dyn StateToken> { + Some(&mut self.token) + } +} + +impl CollectBooleanEvaluator { + fn ensure_entity_capacity(&mut self, len: usize) { + self.token.resize(len) + } + + fn evaluate_non_windowed(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result { + let input = info.value(&self.input)?.array_ref()?; + let key_capacity = info.grouping().num_groups(); + let entity_indices = info.grouping().group_indices(); + assert_eq!(entity_indices.len(), input.len()); + + self.ensure_entity_capacity(key_capacity); + + let input = input.as_boolean(); + let builder = BooleanBuilder::new(); + let mut list_builder = ListBuilder::new(builder); + + izip!(entity_indices.values(), input).for_each(|(entity_index, input)| { + let entity_index = *entity_index as usize; + + // Do not collect null values + if let Some(input) = input { + self.token.add_value(self.max, entity_index, input); + } + + let cur_list = self.token.state(entity_index); + if cur_list.len() >= self.min { + list_builder.append_value(cur_list.iter().map(|i| Some(*i))); + } else { + list_builder.append_null(); + } + }); + + Ok(Arc::new(list_builder.finish())) + } + + /// Since windows follow the pattern "update -> emit -> reset". + /// + /// i.e. if an input appears in the same row as a tick, then that value will + /// be included in the output before the tick causes the state to be cleared. + /// However, note that ticks are generated with a maximum subsort value, so it is + /// unlikely an input naturally appears in the same row as a tick. It is more likely + /// that an input may appear at the same time, but an earlier subsort value. 
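// NOTE (annotation, not part of the diff): the "update -> emit -> reset"
// ordering described above, shown on plain Vecs (hypothetical data):
//
//     let inputs = [Some(1), Some(2), Some(3)];
//     let ticks = [false, true, false]; // a tick fires with the second input
//     let mut state: Vec<i64> = vec![];
//     let mut outputs = vec![];
//     for (v, tick) in inputs.iter().zip(ticks) {
//         if let Some(v) = v { state.push(*v); } // update
//         outputs.push(state.clone());           // emit (same-row input included)
//         if tick { state.clear(); }             // reset
//     }
//     assert_eq!(outputs, vec![vec![1], vec![1, 2], vec![3]]);
// (end annotation)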
+ fn evaluate_since_windowed(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result { + let input = info.value(&self.input)?.array_ref()?; + let key_capacity = info.grouping().num_groups(); + let entity_indices = info.grouping().group_indices(); + assert_eq!(entity_indices.len(), input.len()); + + self.ensure_entity_capacity(key_capacity); + + let input = input.as_boolean(); + let ticks = info.value(&self.tick)?.array_ref()?; + let ticks = ticks.as_boolean(); + + let builder = BooleanBuilder::new(); + let mut list_builder = ListBuilder::new(builder); + + izip!(entity_indices.values(), ticks, input).for_each(|(entity_index, tick, input)| { + let entity_index = *entity_index as usize; + + // Update state + // Do not collect null values + if let Some(input) = input { + self.token.add_value(self.max, entity_index, input); + } + + // Emit state + let cur_list = self.token.state(entity_index); + if cur_list.len() >= self.min { + list_builder.append_value(cur_list.iter().map(|i| Some(*i))); + } else { + list_builder.append_null(); + } + + // Reset state + if let Some(true) = tick { + self.token.reset(entity_index); + } + }); + + Ok(Arc::new(list_builder.finish())) + } + + /// Trailing windows emit values from the window of the current point to the + /// current time minus the given duration. + fn evaluate_trailing_windowed(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result { + let duration = info + .value(&self.duration)? + .try_primitive_literal::()? + .ok_or_else(|| anyhow::anyhow!("Expected non-null literal duration"))?; + debug_assert!(duration > 0); + + let input = info.value(&self.input)?.array_ref()?; + let key_capacity = info.grouping().num_groups(); + let entity_indices = info.grouping().group_indices(); + assert_eq!(entity_indices.len(), input.len()); + + self.ensure_entity_capacity(key_capacity); + + let input = input.as_boolean(); + let input_times = info.time_column().array_ref()?; + let input_times: &TimestampNanosecondArray = input_times.as_primitive(); + + let builder = BooleanBuilder::new(); + let mut list_builder = ListBuilder::new(builder); + + izip!(entity_indices.values(), input, input_times.values()).for_each( + |(entity_index, input, input_time)| { + let entity_index = *entity_index as usize; + + // Update state + // Do not collect null values + if let Some(input) = input { + self.token.add_value_with_time( + self.max, + entity_index, + input, + *input_time, + duration, + ); + } else { + self.token.check_time(entity_index, *input_time, duration); + } + + // Emit state + let cur_list = self.token.state(entity_index); + if cur_list.len() >= self.min { + list_builder.append_value(cur_list.iter().map(|i| Some(*i))); + } else { + list_builder.append_null(); + } + }, + ); + + Ok(Arc::new(list_builder.finish())) + } +} diff --git a/crates/sparrow-instructions/src/evaluators/list/collect_primitive.rs b/crates/sparrow-instructions/src/evaluators/list/collect_primitive.rs new file mode 100644 index 000000000..28a1e087e --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/list/collect_primitive.rs @@ -0,0 +1,257 @@ +use std::sync::Arc; + +use arrow::array::{ArrayRef, AsArray, ListBuilder, PrimitiveBuilder, TimestampNanosecondArray}; +use arrow::datatypes::{ArrowPrimitiveType, DataType, Int64Type}; + +use itertools::izip; +use serde::de::DeserializeOwned; +use serde::Serialize; +use sparrow_arrow::scalar_value::ScalarValue; + +use crate::ValueRef; + +use crate::{CollectToken, Evaluator, EvaluatorFactory, RuntimeInfo, StateToken, StaticInfo}; + +/// Evaluator for the `collect` 
+///
+/// Collects a stream of values into a List. A list is produced
+/// for each input value received, growing up to a maximum size.
+///
+/// If the list is empty, an empty list is returned (rather than `null`).
+#[derive(Debug)]
+pub struct CollectPrimitiveEvaluator<T>
+where
+    T: ArrowPrimitiveType,
+    T::Native: Serialize + DeserializeOwned + Copy,
+{
+    /// The min size of the buffer.
+    ///
+    /// If the buffer is smaller than this, a null value
+    /// will be produced.
+    min: usize,
+    /// The max size of the buffer.
+    ///
+    /// Once the max size is reached, the front will be popped and the new
+    /// value pushed to the back.
+    max: usize,
+    input: ValueRef,
+    tick: ValueRef,
+    duration: ValueRef,
+    /// Contains the buffer of values for each entity
+    token: CollectToken<T::Native>,
+}
+
+impl<T> EvaluatorFactory for CollectPrimitiveEvaluator<T>
+where
+    T: ArrowPrimitiveType + Send + Sync,
+    T::Native: Serialize + DeserializeOwned + Copy,
+{
+    fn try_new(info: StaticInfo<'_>) -> anyhow::Result<Box<dyn Evaluator>> {
+        let input_type = info.args[0].data_type();
+        let result_type = info.result_type;
+        match result_type {
+            DataType::List(t) => anyhow::ensure!(t.data_type() == input_type),
+            other => anyhow::bail!("expected list result type, saw {:?}", other),
+        };
+
+        let max = match info.args[1].value_ref.literal_value() {
+            Some(ScalarValue::Int64(Some(v))) if *v <= 0 => {
+                anyhow::bail!("unexpected value of `max` -- must be > 0")
+            }
+            Some(ScalarValue::Int64(Some(v))) => *v as usize,
+            // If a user specifies `max = null`, we use usize::MAX as a way
+            // to have an "unlimited" buffer.
+            Some(ScalarValue::Int64(None)) => usize::MAX,
+            Some(other) => anyhow::bail!("expected i64 for max parameter, saw {:?}", other),
+            None => anyhow::bail!("expected literal value for max parameter"),
+        };
+
+        let min = match info.args[2].value_ref.literal_value() {
+            Some(ScalarValue::Int64(Some(v))) if *v < 0 => {
+                anyhow::bail!("unexpected value of `min` -- must be >= 0")
+            }
+            Some(ScalarValue::Int64(Some(v))) => *v as usize,
+            // If a user specifies `min = null`, default to 0.
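+            // For example (illustrative): `min = null` together with `max = null`
+            // collects every non-null input and emits a list from the first row
+            // onward, while `min = 3` would emit `null` until three values have
+            // been collected for the entity.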
+            Some(ScalarValue::Int64(None)) => 0,
+            Some(other) => anyhow::bail!("expected i64 for min parameter, saw {:?}", other),
+            None => anyhow::bail!("expected literal value for min parameter"),
+        };
+        debug_assert!(min <= max, "min must be less than or equal to max");
+
+        let (input, _, _, tick, duration) = info.unpack_arguments()?;
+        Ok(Box::new(Self {
+            min,
+            max,
+            input,
+            tick,
+            duration,
+            token: CollectToken::default(),
+        }))
+    }
+}
+
+impl<T> Evaluator for CollectPrimitiveEvaluator<T>
+where
+    T: ArrowPrimitiveType + Send + Sync,
+    T::Native: Serialize + DeserializeOwned + Copy,
+{
+    fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        match (self.tick.is_literal_null(), self.duration.is_literal_null()) {
+            (true, true) => self.evaluate_non_windowed(info),
+            (false, true) => self.evaluate_since_windowed(info),
+            (true, false) => self.evaluate_trailing_windowed(info),
+            (false, false) => panic!("sliding window aggregation should use other evaluator"),
+        }
+    }
+
+    fn state_token(&self) -> Option<&dyn StateToken> {
+        Some(&self.token)
+    }
+
+    fn state_token_mut(&mut self) -> Option<&mut dyn StateToken> {
+        Some(&mut self.token)
+    }
+}
+
+impl<T> CollectPrimitiveEvaluator<T>
+where
+    T: ArrowPrimitiveType + Send + Sync,
+    T::Native: Serialize + DeserializeOwned + Copy,
+{
+    fn ensure_entity_capacity(&mut self, len: usize) {
+        self.token.resize(len)
+    }
+
+    fn evaluate_non_windowed(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        let input = info.value(&self.input)?.array_ref()?;
+        let key_capacity = info.grouping().num_groups();
+        let entity_indices = info.grouping().group_indices();
+        assert_eq!(entity_indices.len(), input.len());
+
+        self.ensure_entity_capacity(key_capacity);
+
+        let input = input.as_primitive::<T>();
+        let builder = PrimitiveBuilder::<T>::new();
+        let mut list_builder = ListBuilder::new(builder);
+
+        izip!(entity_indices.values(), input).for_each(|(entity_index, input)| {
+            let entity_index = *entity_index as usize;
+
+            // Do not collect null values
+            if let Some(input) = input {
+                self.token.add_value(self.max, entity_index, input);
+            }
+
+            let cur_list = self.token.state(entity_index);
+            if cur_list.len() >= self.min {
+                list_builder.append_value(cur_list.iter().map(|i| Some(*i)));
+            } else {
+                list_builder.append_null();
+            }
+        });
+
+        Ok(Arc::new(list_builder.finish()))
+    }
+
+    /// Since windows follow the pattern "update -> emit -> reset".
+    ///
+    /// i.e. if an input appears in the same row as a tick, then that value will
+    /// be included in the output before the tick causes the state to be cleared.
+    /// However, note that ticks are generated with a maximum subsort value, so it is
+    /// unlikely an input naturally appears in the same row as a tick. It is more likely
+    /// that an input appears at the same time, but with an earlier subsort value.
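+    ///
+    /// An illustrative trace (hypothetical values, `min = 0`):
+    ///
+    ///   input: 1     2      3
+    ///   tick:  -     true   -
+    ///   emit:  [1]   [1,2]  [3]
+    ///
+    /// The tick row still observes `2` (update, then emit); only afterwards is
+    /// the buffer reset, so the next row starts a fresh window.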
+    fn evaluate_since_windowed(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        let input = info.value(&self.input)?.array_ref()?;
+        let key_capacity = info.grouping().num_groups();
+        let entity_indices = info.grouping().group_indices();
+        assert_eq!(entity_indices.len(), input.len());
+
+        self.ensure_entity_capacity(key_capacity);
+
+        let input = input.as_primitive::<T>();
+        let ticks = info.value(&self.tick)?.array_ref()?;
+        let ticks = ticks.as_boolean();
+
+        let builder = PrimitiveBuilder::<T>::new();
+        let mut list_builder = ListBuilder::new(builder);
+
+        izip!(entity_indices.values(), ticks, input).for_each(|(entity_index, tick, input)| {
+            let entity_index = *entity_index as usize;
+
+            // Update state
+            // Do not collect null values
+            if let Some(input) = input {
+                self.token.add_value(self.max, entity_index, input);
+            }
+
+            // Emit state
+            let cur_list = self.token.state(entity_index);
+            if cur_list.len() >= self.min {
+                list_builder.append_value(cur_list.iter().map(|i| Some(*i)));
+            } else {
+                list_builder.append_null();
+            }
+
+            // Reset state
+            if let Some(true) = tick {
+                self.token.reset(entity_index);
+            }
+        });
+
+        Ok(Arc::new(list_builder.finish()))
+    }
+
+    /// Trailing windows emit values from the current point back to the current
+    /// time minus the given duration.
+    fn evaluate_trailing_windowed(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        let duration = info
+            .value(&self.duration)?
+            .try_primitive_literal::<Int64Type>()?
+            .ok_or_else(|| anyhow::anyhow!("Expected non-null literal duration"))?;
+        debug_assert!(duration > 0);
+
+        let input = info.value(&self.input)?.array_ref()?;
+        let key_capacity = info.grouping().num_groups();
+        let entity_indices = info.grouping().group_indices();
+        assert_eq!(entity_indices.len(), input.len());
+
+        self.ensure_entity_capacity(key_capacity);
+
+        let input = input.as_primitive::<T>();
+        let input_times = info.time_column().array_ref()?;
+        let input_times: &TimestampNanosecondArray = input_times.as_primitive();
+
+        let builder = PrimitiveBuilder::<T>::new();
+        let mut list_builder = ListBuilder::new(builder);
+
+        izip!(entity_indices.values(), input, input_times.values()).for_each(
+            |(entity_index, input, input_time)| {
+                let entity_index = *entity_index as usize;
+
+                // Update state
+                // Do not collect null values
+                if let Some(input) = input {
+                    self.token.add_value_with_time(
+                        self.max,
+                        entity_index,
+                        input,
+                        *input_time,
+                        duration,
+                    );
+                } else {
+                    self.token.check_time(entity_index, *input_time, duration);
+                }
+
+                // Emit state
+                let cur_list = self.token.state(entity_index);
+                if cur_list.len() >= self.min {
+                    list_builder.append_value(cur_list.iter().map(|i| Some(*i)));
+                } else {
+                    list_builder.append_null();
+                }
+            },
+        );
+
+        Ok(Arc::new(list_builder.finish()))
+    }
+}
diff --git a/crates/sparrow-instructions/src/evaluators/list/collect_string.rs b/crates/sparrow-instructions/src/evaluators/list/collect_string.rs
new file mode 100644
index 000000000..02b8faf8c
--- /dev/null
+++ b/crates/sparrow-instructions/src/evaluators/list/collect_string.rs
@@ -0,0 +1,237 @@
+use crate::ValueRef;
+use crate::{CollectToken, Evaluator, EvaluatorFactory, RuntimeInfo, StateToken, StaticInfo};
+use arrow::array::{ArrayRef, AsArray, ListBuilder, StringBuilder, TimestampNanosecondArray};
+use arrow::datatypes::{DataType, Int64Type};
+use itertools::izip;
+use sparrow_arrow::scalar_value::ScalarValue;
+use std::sync::Arc;
+
+/// Evaluator for the `collect` instruction.
+///
+/// Collects a stream of values into a List.
A list is produced +/// for each input value received, growing up to a maximum size. +/// +/// If the list is empty, an empty list is returned (rather than `null`). +#[derive(Debug)] +pub struct CollectStringEvaluator { + /// The min size of the buffer. + /// + /// If the buffer is smaller than this, a null value + /// will be produced. + min: usize, + /// The max size of the buffer. + /// + /// Once the max size is reached, the front will be popped and the new + /// value pushed to the back. + max: usize, + input: ValueRef, + tick: ValueRef, + duration: ValueRef, + /// Contains the buffer of values for each entity + token: CollectToken, +} + +impl EvaluatorFactory for CollectStringEvaluator { + fn try_new(info: StaticInfo<'_>) -> anyhow::Result> { + let input_type = info.args[0].data_type(); + let result_type = info.result_type; + match result_type { + DataType::List(t) => anyhow::ensure!(t.data_type() == input_type), + other => anyhow::bail!("expected list result type, saw {:?}", other), + }; + + let max = match info.args[1].value_ref.literal_value() { + Some(ScalarValue::Int64(Some(v))) if *v <= 0 => { + anyhow::bail!("unexpected value of `max` -- must be > 0") + } + Some(ScalarValue::Int64(Some(v))) => *v as usize, + // If a user specifies `max = null`, we use usize::MAX value as a way + // to have an "unlimited" buffer. + Some(ScalarValue::Int64(None)) => usize::MAX, + Some(other) => anyhow::bail!("expected i64 for max parameter, saw {:?}", other), + None => anyhow::bail!("expected literal value for max parameter"), + }; + + let min = match info.args[2].value_ref.literal_value() { + Some(ScalarValue::Int64(Some(v))) if *v < 0 => { + anyhow::bail!("unexpected value of `min` -- must be >= 0") + } + Some(ScalarValue::Int64(Some(v))) => *v as usize, + // If a user specifies `min = null`, default to 0. 
+            Some(ScalarValue::Int64(None)) => 0,
+            Some(other) => anyhow::bail!("expected i64 for min parameter, saw {:?}", other),
+            None => anyhow::bail!("expected literal value for min parameter"),
+        };
+        debug_assert!(min <= max, "min must be less than or equal to max");
+
+        let (input, _, _, tick, duration) = info.unpack_arguments()?;
+        Ok(Box::new(Self {
+            min,
+            max,
+            input,
+            tick,
+            duration,
+            token: CollectToken::default(),
+        }))
+    }
+}
+
+impl Evaluator for CollectStringEvaluator {
+    fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        match (self.tick.is_literal_null(), self.duration.is_literal_null()) {
+            (true, true) => self.evaluate_non_windowed(info),
+            (false, true) => self.evaluate_since_windowed(info),
+            (true, false) => self.evaluate_trailing_windowed(info),
+            (false, false) => panic!("sliding window aggregation should use other evaluator"),
+        }
+    }
+
+    fn state_token(&self) -> Option<&dyn StateToken> {
+        Some(&self.token)
+    }
+
+    fn state_token_mut(&mut self) -> Option<&mut dyn StateToken> {
+        Some(&mut self.token)
+    }
+}
+
+impl CollectStringEvaluator {
+    fn ensure_entity_capacity(&mut self, len: usize) {
+        self.token.resize(len)
+    }
+
+    fn evaluate_non_windowed(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        let input = info.value(&self.input)?.array_ref()?;
+        let key_capacity = info.grouping().num_groups();
+        let entity_indices = info.grouping().group_indices();
+        assert_eq!(entity_indices.len(), input.len());
+
+        self.ensure_entity_capacity(key_capacity);
+
+        let input = input.as_string::<i32>();
+        let builder = StringBuilder::new();
+        let mut list_builder = ListBuilder::new(builder);
+
+        izip!(entity_indices.values(), input).for_each(|(entity_index, input)| {
+            let entity_index = *entity_index as usize;
+
+            // Do not collect null values
+            if let Some(input) = input {
+                self.token
+                    .add_value(self.max, entity_index, input.to_owned());
+            }
+
+            let cur_list = self.token.state(entity_index);
+            if cur_list.len() >= self.min {
+                list_builder.append_value(cur_list.iter().map(Some));
+            } else {
+                list_builder.append_null();
+            }
+        });
+
+        Ok(Arc::new(list_builder.finish()))
+    }
+
+    /// Since windows follow the pattern "update -> emit -> reset".
+    ///
+    /// i.e. if an input appears in the same row as a tick, then that value will
+    /// be included in the output before the tick causes the state to be cleared.
+    /// However, note that ticks are generated with a maximum subsort value, so it is
+    /// unlikely an input naturally appears in the same row as a tick. It is more likely
+    /// that an input appears at the same time, but with an earlier subsort value.
+ fn evaluate_since_windowed(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result { + let input = info.value(&self.input)?.array_ref()?; + let key_capacity = info.grouping().num_groups(); + let entity_indices = info.grouping().group_indices(); + assert_eq!(entity_indices.len(), input.len()); + + self.ensure_entity_capacity(key_capacity); + + let input = input.as_string::(); + let ticks = info.value(&self.tick)?.array_ref()?; + let ticks = ticks.as_boolean(); + + let builder = StringBuilder::new(); + let mut list_builder = ListBuilder::new(builder); + + izip!(entity_indices.values(), ticks, input).for_each(|(entity_index, tick, input)| { + let entity_index = *entity_index as usize; + + // Update state + // Do not collect null values + if let Some(input) = input { + self.token + .add_value(self.max, entity_index, input.to_owned()); + } + + // Emit state + let cur_list = self.token.state(entity_index); + if cur_list.len() >= self.min { + list_builder.append_value(cur_list.iter().map(Some)); + } else { + list_builder.append_null(); + } + + // Reset state + if let Some(true) = tick { + self.token.reset(entity_index); + } + }); + + Ok(Arc::new(list_builder.finish())) + } + + /// Trailing windows emit values from the window of the current point to the + /// current time minus the given duration. + fn evaluate_trailing_windowed(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result { + let duration = info + .value(&self.duration)? + .try_primitive_literal::()? + .ok_or_else(|| anyhow::anyhow!("Expected non-null literal duration"))?; + debug_assert!(duration > 0); + + let input = info.value(&self.input)?.array_ref()?; + let key_capacity = info.grouping().num_groups(); + let entity_indices = info.grouping().group_indices(); + assert_eq!(entity_indices.len(), input.len()); + + self.ensure_entity_capacity(key_capacity); + + let input = input.as_string::(); + let input_times = info.time_column().array_ref()?; + let input_times: &TimestampNanosecondArray = input_times.as_primitive(); + + let builder = StringBuilder::new(); + let mut list_builder = ListBuilder::new(builder); + + izip!(entity_indices.values(), input, input_times.values()).for_each( + |(entity_index, input, input_time)| { + let entity_index = *entity_index as usize; + + // Update state + // Do not collect null values + if let Some(input) = input { + self.token.add_value_with_time( + self.max, + entity_index, + input.to_owned(), + *input_time, + duration, + ); + } else { + self.token.check_time(entity_index, *input_time, duration); + } + + // Emit state + let cur_list = self.token.state(entity_index); + if cur_list.len() >= self.min { + list_builder.append_value(cur_list.iter().map(Some)); + } else { + list_builder.append_null(); + } + }, + ); + + Ok(Arc::new(list_builder.finish())) + } +} diff --git a/crates/sparrow-instructions/src/evaluators/list/collect_struct.rs b/crates/sparrow-instructions/src/evaluators/list/collect_struct.rs new file mode 100644 index 000000000..120516525 --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/list/collect_struct.rs @@ -0,0 +1,1234 @@ +use crate::ValueRef; +use crate::{CollectStructToken, Evaluator, EvaluatorFactory, RuntimeInfo, StateToken, StaticInfo}; +use arrow::array::{ + new_empty_array, Array, ArrayRef, AsArray, ListArray, TimestampNanosecondArray, UInt32Array, + UInt32Builder, +}; +use arrow::buffer::{BooleanBuffer, NullBuffer, OffsetBuffer, ScalarBuffer}; +use arrow::datatypes::{DataType, Int64Type}; +use arrow_schema::{Field, TimeUnit}; +use itertools::{izip, Itertools}; +use 
sparrow_arrow::scalar_value::ScalarValue; +use std::collections::{BTreeMap, VecDeque}; +use std::sync::Arc; + +/// Evaluator for the `collect` instruction. +/// +/// Collects a stream of struct values into a List. A list is produced +/// for each input value received, growing up to a maximum size. +/// +/// If the list is empty, an empty list is returned (rather than `null`). +#[derive(Debug)] +pub struct CollectStructEvaluator { + /// The min size of the buffer. + /// + /// If the buffer is smaller than this, a null value + /// will be produced. + min: usize, + /// The max size of the buffer. + /// + /// Once the max size is reached, the front will be popped and the new + /// value pushed to the back. + max: usize, + input: ValueRef, + tick: ValueRef, + duration: ValueRef, + /// Contains the buffer of values for each entity + token: CollectStructToken, +} + +impl EvaluatorFactory for CollectStructEvaluator { + fn try_new(info: StaticInfo<'_>) -> anyhow::Result> { + let input_type = info.args[0].data_type(); + let result_type = info.result_type; + match result_type { + DataType::List(t) => { + anyhow::ensure!(matches!(input_type, DataType::Struct(..))); + anyhow::ensure!(t.data_type() == input_type); + } + other => anyhow::bail!("expected list result type, saw {:?}", other), + }; + + let max = match info.args[1].value_ref.literal_value() { + Some(ScalarValue::Int64(Some(v))) if *v <= 0 => { + anyhow::bail!("unexpected value of `max` -- must be > 0") + } + Some(ScalarValue::Int64(Some(v))) => *v as usize, + // If a user specifies `max = null`, we use usize::MAX value as a way + // to have an "unlimited" buffer. + Some(ScalarValue::Int64(None)) => usize::MAX, + Some(other) => anyhow::bail!("expected i64 for max parameter, saw {:?}", other), + None => anyhow::bail!("expected literal value for max parameter"), + }; + let min = match info.args[2].value_ref.literal_value() { + Some(ScalarValue::Int64(Some(v))) if *v < 0 => { + anyhow::bail!("unexpected value of `min` -- must be >= 0") + } + Some(ScalarValue::Int64(Some(v))) => *v as usize, + // If a user specifies `min = null`, default to 0. 
+            Some(ScalarValue::Int64(None)) => 0,
+            Some(other) => anyhow::bail!("expected i64 for min parameter, saw {:?}", other),
+            None => anyhow::bail!("expected literal value for min parameter"),
+        };
+        debug_assert!(min <= max, "min must be less than or equal to max");
+
+        let accum = new_empty_array(result_type).as_list::<i32>().to_owned();
+        let token = CollectStructToken::new(Arc::new(accum));
+        let (input, _, _, tick, duration) = info.unpack_arguments()?;
+        Ok(Box::new(Self {
+            min,
+            max,
+            input,
+            tick,
+            duration,
+            token,
+        }))
+    }
+}
+
+impl Evaluator for CollectStructEvaluator {
+    fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        match (self.tick.is_literal_null(), self.duration.is_literal_null()) {
+            (true, true) => {
+                let token = &mut self.token;
+                let input = info.value(&self.input)?.array_ref()?;
+                let key_capacity = info.grouping().num_groups();
+                let entity_indices = info.grouping().group_indices();
+                Self::evaluate_non_windowed(
+                    token,
+                    key_capacity,
+                    entity_indices,
+                    input,
+                    self.min,
+                    self.max,
+                )
+            }
+            (false, true) => {
+                let token = &mut self.token;
+                let input = info.value(&self.input)?.array_ref()?;
+                let tick = info.value(&self.tick)?.array_ref()?;
+                let key_capacity = info.grouping().num_groups();
+                let entity_indices = info.grouping().group_indices();
+                Self::evaluate_since_windowed(
+                    token,
+                    key_capacity,
+                    entity_indices,
+                    input,
+                    tick,
+                    self.min,
+                    self.max,
+                )
+            }
+            (true, false) => {
+                let token = &mut self.token;
+                let input = info.value(&self.input)?.array_ref()?;
+                let input_times = info.time_column().array_ref()?;
+
+                // The duration is the nanosecond time to trail the window by.
+                let duration = info
+                    .value(&self.duration)?
+                    .try_primitive_literal::<Int64Type>()?
+                    .ok_or_else(|| anyhow::anyhow!("Expected non-null literal duration"))?;
+
+                let key_capacity = info.grouping().num_groups();
+                let entity_indices = info.grouping().group_indices();
+                Self::evaluate_trailing_windowed(
+                    token,
+                    key_capacity,
+                    entity_indices,
+                    input,
+                    input_times,
+                    duration,
+                    self.min,
+                    self.max,
+                )
+            }
+            (false, false) => panic!("sliding window aggregation should use other evaluator"),
+        }
+    }
+
+    fn state_token(&self) -> Option<&dyn StateToken> {
+        Some(&self.token)
+    }
+
+    fn state_token_mut(&mut self) -> Option<&mut dyn StateToken> {
+        Some(&mut self.token)
+    }
+}
+
+impl CollectStructEvaluator {
+    fn ensure_entity_capacity(token: &mut CollectStructToken, len: usize) -> anyhow::Result<()> {
+        token.resize(len)
+    }
+
+    /// Construct the entity take indices for the current state.
+    fn construct_entity_take_indices(
+        token: &mut CollectStructToken,
+    ) -> BTreeMap<u32, VecDeque<u32>> {
+        let mut entity_take_indices = BTreeMap::<u32, VecDeque<u32>>::new();
+        let state = token.state.as_list::<i32>();
+        for (index, (start, end)) in state.offsets().iter().tuple_windows().enumerate() {
+            // The index of enumeration is the entity index
+            entity_take_indices.insert(index as u32, (*start as u32..*end as u32).collect());
+        }
+        entity_take_indices
+    }
+
+    /// Evaluate the collect instruction for non-windowed aggregation.
+    ///
+    /// This algorithm takes advantage of the fact that [ListArray]s are
+    /// represented as a single flattened list of values and a list of offsets.
+    /// By constructing the take indices for each entity, we can then use
+    /// [sparrow_arrow::concat_take] to efficiently take the values at the indices
+    /// we need to construct the output list.
+ /// + /// See the following example: + /// + /// Current state: [Ben = [A, B], Jordan = [C, D]] + /// state.flattened: [A, B, C, D] + /// state.offsets: [0, 2, 4] + /// + /// New Input: [E, F, G] + /// Entity Indices: [Ben, Jordan, Ben] + /// + /// Concat the flattened state with the new input: + /// concat_state_input: [A, B, C, D, E, F, G] + /// + /// Create the current entity take indices: + /// { Ben: [0, 1], Jordan: [2, 3] } + /// + /// For each entity, we need to take the values at the indices + /// from the new input, append all indices for that entity to the + /// Output Take Indices, then append the number of indices to the + /// Output Offset builder: + /// + /// Entity: Ben, Input: [E] + /// Entity Take Indices: { Ben: [0, 1, 4], Jordan: [2, 3] } + /// Output Take Indices: [0, 1, 4] + /// Output Offset: [0, 3] + /// + /// Entity: Jordan, Input: [F] + /// Entity Take Indices: { Ben: [0, 1, 4], Jordan: [2, 3, 5] } + /// Output Take Indices: [0, 1, 4, 2, 3, 5] + /// Output Offset: [0, 3, 6] + /// + /// Entity: Ben, Input: [G] + /// Entity Take Indices: { Ben: [0, 1, 4, 6], Jordan: [2, 3, 5] } + /// Output Take Indices: [0, 1, 4, 2, 3, 5, 0, 1, 4, 6] + /// Output Offset: [0, 3, 6, 10] + /// + /// Then, we use [sparrow_arrow::concat_take] to concat the old flattened state + /// and the input together, then take the Output Take Indices. This constructs an + /// output: + /// + /// concat_state_input: [A, B, C, D, E, F, G] + /// Output Take Indices: [0, 1, 4, 2, 3, 5, 0, 1, 4, 6] + /// Output Values: [A, B, E, C, D, F, A, B, E, G] + /// + /// Then, with the offsets, we can construct the output lists: + /// [[A, B, E], [C, D, F], [A, B, E, G]] + /// + /// Lastly, the new state must be set. + /// The Entity Take Indices are flattened and the offsets are constructed. 
+ /// + /// Entity Take Indices: { Ben: [0, 1, 4, 6], Jordan: [2, 3, 5] } + /// Flattened: [0, 1, 4, 6, 2, 3, 5] + /// Offsets: [0, 4, 7] + /// + /// New State: [Ben = [A, B, E, G], Jordan = [C, D, F]] + fn evaluate_non_windowed( + token: &mut CollectStructToken, + key_capacity: usize, + entity_indices: &UInt32Array, + input: ArrayRef, + min: usize, + max: usize, + ) -> anyhow::Result { + let input_structs = input.as_struct(); + assert_eq!(entity_indices.len(), input_structs.len()); + + Self::ensure_entity_capacity(token, key_capacity)?; + + // Recreate the take indices for the current state + let mut entity_take_indices = Self::construct_entity_take_indices(token); + + let old_state = token.state.as_list::(); + let old_state_flat = old_state.values(); + + let mut take_output_builder = UInt32Builder::new(); + let mut output_offset_builder = vec![0]; + + // Tracks the result's null values + let mut null_buffer = vec![]; + + let mut cur_offset = 0; + // For each entity, append the take indices for the new input to the existing + // entity take indices + for (index, entity_index) in entity_indices.values().iter().enumerate() { + if input.is_valid(index) { + let take_index = (old_state_flat.len() + index) as u32; + entity_take_indices + .entry(*entity_index) + .and_modify(|v| { + v.push_back(take_index); + if v.len() > max { + v.pop_front(); + } + }) + .or_insert(vec![take_index].into()); + } + + // safety: map was resized to handle entity_index size + let entity_take = entity_take_indices.get(entity_index).unwrap(); + + if entity_take.len() >= min { + // Append this entity's take indices to the take output builder + entity_take.iter().for_each(|i| { + take_output_builder.append_value(*i); + }); + + // Append this entity's current number of take indices to the output offset builder + cur_offset += entity_take.len(); + + output_offset_builder.push(cur_offset as i32); + null_buffer.push(true); + } else { + // Append null if there are not enough values + take_output_builder.append_null(); + null_buffer.push(false); + + // Cur offset increases by 1 to account for the null value + cur_offset += 1; + output_offset_builder.push(cur_offset as i32); + } + } + let output_values = + sparrow_arrow::concat_take(old_state_flat, &input, &take_output_builder.finish())?; + + let fields = input_structs.fields().clone(); + let field = Arc::new(Field::new("item", DataType::Struct(fields.clone()), true)); + + let result = ListArray::new( + field, + OffsetBuffer::new(ScalarBuffer::from(output_offset_builder)), + output_values, + Some(NullBuffer::from(BooleanBuffer::from(null_buffer))), + ); + + // Now update the new state using the last entity take indices + let new_state = update_token_state(&entity_take_indices, old_state_flat, input, fields)?; + token.set_state(Arc::new(new_state)); + + Ok(Arc::new(result)) + } + + /// Evaluates the collect function for structs with a `since` window. + /// + /// State is handled in order of "update -> emit -> reset". + /// + /// Follows the same implementation as above, but resets the state of + /// an entity when a `tick` is seen. 
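+    ///
+    /// Illustrative sketch (hypothetical values): with state `{ Ben: [0, 1] }`,
+    /// a row `(entity: Ben, tick: true)` whose input lands at take index 4 first
+    /// updates the map to `{ Ben: [0, 1, 4] }`, then emits `[0, 1, 4]`, and only
+    /// then resets the entry to `{ Ben: [] }`.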
+ fn evaluate_since_windowed( + token: &mut CollectStructToken, + key_capacity: usize, + entity_indices: &UInt32Array, + input: ArrayRef, + ticks: ArrayRef, + min: usize, + max: usize, + ) -> anyhow::Result { + let ticks = ticks.as_boolean(); + let input_structs = input.as_struct(); + assert_eq!(entity_indices.len(), input_structs.len()); + + Self::ensure_entity_capacity(token, key_capacity)?; + + // Recreate the take indices for the current state + let mut entity_take_indices = Self::construct_entity_take_indices(token); + + let old_state = token.state.as_list::(); + let old_state_flat = old_state.values(); + + let mut take_output_builder = UInt32Builder::new(); + let mut output_offset_builder = vec![0]; + + // Tracks the result's null values + let mut null_buffer = vec![]; + + let mut cur_offset = 0; + // For each entity, append the take indices for the new input to the existing + // entity take indices + for (index, (tick, entity_index)) in + izip!(ticks.values().iter(), entity_indices.values().iter()).enumerate() + { + // Update state + if input.is_valid(index) { + let take_index = (old_state_flat.len() + index) as u32; + entity_take_indices + .entry(*entity_index) + .and_modify(|v| { + v.push_back(take_index); + if v.len() > max { + v.pop_front(); + } + }) + .or_insert(vec![take_index].into()); + } + + // safety: map was resized to handle entity_index size + let entity_take = entity_take_indices.get(entity_index).unwrap(); + + // Emit state + if entity_take.len() >= min { + // Append this entity's take indices to the take output builder + entity_take.iter().for_each(|i| { + take_output_builder.append_value(*i); + }); + + // Append this entity's current number of take indices to the output offset builder + cur_offset += entity_take.len(); + + output_offset_builder.push(cur_offset as i32); + null_buffer.push(true); + } else { + // Append null if there are not enough values + take_output_builder.append_null(); + null_buffer.push(false); + + // Cur offset increases by 1 to account for the null value + cur_offset += 1; + output_offset_builder.push(cur_offset as i32); + } + + // Reset state + if ticks.is_valid(index) && tick { + entity_take_indices.insert(*entity_index, vec![].into()); + } + } + let output_values = + sparrow_arrow::concat_take(old_state_flat, &input, &take_output_builder.finish())?; + + let fields = input_structs.fields().clone(); + let field = Arc::new(Field::new("item", DataType::Struct(fields.clone()), true)); + + let result = ListArray::new( + field, + OffsetBuffer::new(ScalarBuffer::from(output_offset_builder)), + output_values, + Some(NullBuffer::from(BooleanBuffer::from(null_buffer))), + ); + + // Now update the new state using the last entity take indices + let new_state = update_token_state(&entity_take_indices, old_state_flat, input, fields)?; + token.set_state(Arc::new(new_state)); + + Ok(Arc::new(result)) + } + + /// Evaluates the collect function for structs with a `trailing` window. + /// + /// State is handled in order of "update -> emit -> reset". + /// + /// Follows the same implementation as above, but includes values in [current time - duration]. 
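+    ///
+    /// Illustrative sketch (hypothetical values): with `duration = 6` and inputs
+    /// `a, b, c` at times `[0, 5, 10]` for one entity, the emitted windows are
+    /// `[a]`, `[a, b]`, and `[b, c]` -- at time 10 the value from time 0 falls
+    /// outside the half-open interval `(10 - 6, 10]` and is popped from the front.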
+ #[allow(clippy::too_many_arguments)] + fn evaluate_trailing_windowed( + token: &mut CollectStructToken, + key_capacity: usize, + entity_indices: &UInt32Array, + input: ArrayRef, + input_times: ArrayRef, + duration: i64, + min: usize, + max: usize, + ) -> anyhow::Result { + let input_structs = input.as_struct(); + assert_eq!(entity_indices.len(), input_structs.len()); + + Self::ensure_entity_capacity(token, key_capacity)?; + + // Recreate the take indices for the current state + let mut entity_take_indices = Self::construct_entity_take_indices(token); + + let old_state = token.state.as_list::(); + let old_state_flat = old_state.values(); + + let mut take_output_builder = UInt32Builder::new(); + let mut output_offset_builder = vec![0]; + + // Tracks the result's null values + let mut null_buffer = vec![]; + + // Concat the state's times and the input's times + let old_times = token.times.as_list::(); + let old_times_flat = old_times.values(); + let combined_times = + arrow::compute::concat(&[old_times_flat.as_ref(), input_times.as_ref()])?; + let combined_times: &TimestampNanosecondArray = combined_times.as_primitive(); + assert_eq!( + old_state_flat.len(), + old_times_flat.len(), + "time and state length mismatch" + ); + + let mut cur_offset = 0; + // For each entity, append the take indices for the new input to the existing + // entity take indices + for (index, entity_index) in entity_indices.values().iter().enumerate() { + // Update state + let take_index = (old_state_flat.len() + index) as u32; + if input.is_valid(index) { + entity_take_indices + .entry(*entity_index) + .and_modify(|v| { + v.push_back(take_index); + if v.len() > max { + v.pop_front(); + } + }) + .or_insert(vec![take_index].into()); + + pop_trailing_window_if_needed( + take_index as usize, + *entity_index, + &mut entity_take_indices, + combined_times, + duration, + ); + } else { + pop_trailing_window_if_needed( + take_index as usize, + *entity_index, + &mut entity_take_indices, + combined_times, + duration, + ); + } + + // safety: map was resized to handle entity_index size + let entity_take = entity_take_indices.get(entity_index).unwrap(); + + // Emit state + if entity_take.len() >= min { + // Append this entity's take indices to the take output builder + entity_take.iter().for_each(|i| { + take_output_builder.append_value(*i); + }); + + // Append this entity's current number of take indices to the output offset builder + cur_offset += entity_take.len(); + + output_offset_builder.push(cur_offset as i32); + null_buffer.push(true); + } else { + // Append null if there are not enough values + take_output_builder.append_null(); + null_buffer.push(false); + + // Cur offset increases by 1 to account for the null value + cur_offset += 1; + output_offset_builder.push(cur_offset as i32); + } + } + let output_values = + sparrow_arrow::concat_take(old_state_flat, &input, &take_output_builder.finish())?; + + let fields = input_structs.fields().clone(); + let field = Arc::new(Field::new("item", DataType::Struct(fields.clone()), true)); + + let result = ListArray::new( + field, + OffsetBuffer::new(ScalarBuffer::from(output_offset_builder)), + output_values, + Some(NullBuffer::from(BooleanBuffer::from(null_buffer))), + ); + + // Now update the new state using the last entity take indices + let new_state = update_token_state(&entity_take_indices, old_state_flat, input, fields)?; + let new_times = update_token_times(&entity_take_indices, old_times_flat, input_times)?; + token.set_state_and_time(Arc::new(new_state), 
Arc::new(new_times)); + + Ok(Arc::new(result)) + } +} + +/// Pops the front element(s) from the window if time has progressed +/// past the window's duration. +fn pop_trailing_window_if_needed( + take_index: usize, + entity_index: u32, + entity_take_indices: &mut BTreeMap>, + combined_times: &arrow::array::PrimitiveArray, + duration: i64, +) { + let v = entity_take_indices + .entry(entity_index) + .or_insert(vec![].into()); + + if let Some(front_i) = v.front() { + let mut oldest_time = combined_times.value(*front_i as usize); + // Note this uses the `combined_times` and `take_index` + // because it's possible we need to pop off new input + let min_window_start = combined_times.value(take_index) - duration; + while oldest_time <= min_window_start { + v.pop_front(); + if let Some(f_i) = v.front() { + oldest_time = combined_times.value(*f_i as usize); + } else { + return; + } + } + } +} + +/// Uses the final entity take indices to get the new state +fn update_token_state( + entity_take_indices: &BTreeMap>, + old_state_flat: &Arc, + input: Arc, + fields: arrow_schema::Fields, +) -> anyhow::Result { + let mut new_state_offset_builder = Vec::with_capacity(entity_take_indices.len()); + new_state_offset_builder.push(0); + + let mut cur_state_offset = 0; + let take_new_state = entity_take_indices.values().flat_map(|v| { + cur_state_offset += v.len() as i32; + new_state_offset_builder.push(cur_state_offset); + v.iter().copied().map(Some) + }); + let take_new_state = UInt32Array::from_iter(take_new_state); + + let new_state_values = sparrow_arrow::concat_take(old_state_flat, &input, &take_new_state)?; + let new_state = ListArray::new( + Arc::new(Field::new("item", DataType::Struct(fields), true)), + OffsetBuffer::new(ScalarBuffer::from(new_state_offset_builder)), + new_state_values, + None, + ); + Ok(new_state) +} + +/// Uses the final entity take indices to get the new times +fn update_token_times( + entity_take_indices: &BTreeMap>, + old_times_flat: &Arc, + input_times: Arc, +) -> anyhow::Result { + let mut new_times_offset_builder = Vec::with_capacity(entity_take_indices.len()); + new_times_offset_builder.push(0); + + let mut cur_times_offset = 0; + let take_new_times = entity_take_indices.values().flat_map(|v| { + cur_times_offset += v.len() as i32; + new_times_offset_builder.push(cur_times_offset); + v.iter().copied().map(Some) + }); + let take_new_times = UInt32Array::from_iter(take_new_times); + + let new_times_values = + sparrow_arrow::concat_take(old_times_flat, &input_times, &take_new_times)?; + let new_state = ListArray::new( + Arc::new(Field::new( + "item", + DataType::Timestamp(TimeUnit::Nanosecond, None), + true, + )), + OffsetBuffer::new(ScalarBuffer::from(new_times_offset_builder)), + new_times_values, + None, + ); + Ok(new_state) +} + +#[cfg(test)] +mod tests { + use super::*; + use arrow::{ + array::{ + new_null_array, ArrayBuilder, AsArray, Int64Array, Int64Builder, StringArray, + StringBuilder, StructArray, StructBuilder, + }, + buffer::ScalarBuffer, + }; + use arrow_schema::{DataType, Field, Fields}; + use std::sync::Arc; + + fn default_token() -> CollectStructToken { + let f = Arc::new(Field::new( + "item", + DataType::Struct(Fields::from(vec![ + Field::new("n", DataType::Int64, true), + Field::new("s", DataType::Utf8, true), + ])), + true, + )); + let result_type = DataType::List(f); + let accum = new_empty_array(&result_type).as_list::().to_owned(); + CollectStructToken::new(Arc::new(accum)) + } + + #[test] + fn test_basic_collect_multiple_batches() { + let mut token = 
default_token(); + // Batch 1 + let n_array = Int64Array::from(vec![Some(0), Some(1), Some(2)]); + let s_array = StringArray::from(vec![Some("a"), Some("b"), Some("c")]); + let input = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let input = Arc::new(input); + + let key_indices = UInt32Array::from(vec![0, 0, 0]); + let key_capacity = 1; + + let result = CollectStructEvaluator::evaluate_non_windowed( + &mut token, + key_capacity, + &key_indices, + input, + 0, + usize::MAX, + ) + .unwrap(); + let result = result.as_list::(); + + // build expected result 1 + let n_array = Int64Array::from(vec![Some(0), Some(0), Some(1), Some(0), Some(1), Some(2)]); + let s_array = StringArray::from(vec![ + Some("a"), + Some("a"), + Some("b"), + Some("a"), + Some("b"), + Some("c"), + ]); + let expected = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let offsets = ScalarBuffer::from(vec![0, 1, 3, 6]); + let expected = ListArray::new( + Arc::new(Field::new("item", expected.data_type().clone(), true)), + OffsetBuffer::new(offsets), + Arc::new(expected), + None, + ); + let expected = Arc::new(expected); + assert_eq!(expected.as_ref(), result); + + // Batch 2 + let n_array = Int64Array::from(vec![Some(3), Some(4), Some(5)]); + let s_array = StringArray::from(vec![Some("d"), Some("e"), Some("f")]); + let input = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let input = Arc::new(input); + + // New entity! 
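+        // (Entity index 1 appears here for the first time, so `key_capacity`
+        // grows to 2 and the token is resized before values are appended.)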
+ let key_indices = UInt32Array::from(vec![1, 0, 1]); + let key_capacity = 2; + + let result = CollectStructEvaluator::evaluate_non_windowed( + &mut token, + key_capacity, + &key_indices, + input, + 0, + usize::MAX, + ) + .unwrap(); + let result = result.as_list::(); + + // build expected result 2 + let n_array = Int64Array::from(vec![ + Some(3), + Some(0), + Some(1), + Some(2), + Some(4), + Some(3), + Some(5), + ]); + let s_array = StringArray::from(vec![ + Some("d"), + Some("a"), + Some("b"), + Some("c"), + Some("e"), + Some("d"), + Some("f"), + ]); + let expected = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let offsets = ScalarBuffer::from(vec![0, 1, 5, 7]); + let expected = ListArray::new( + Arc::new(Field::new("item", expected.data_type().clone(), true)), + OffsetBuffer::new(offsets), + Arc::new(expected), + None, + ); + let expected = Arc::new(expected); + assert_eq!(expected.as_ref(), result); + } + + #[test] + fn test_basic_collect_multiple_batches_with_max() { + let max = 2; + let mut token = default_token(); + // Batch 1 + let n_array = Int64Array::from(vec![Some(0), Some(1), Some(2)]); + let s_array = StringArray::from(vec![Some("a"), Some("b"), Some("c")]); + let input = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let input = Arc::new(input); + + let key_indices = UInt32Array::from(vec![0, 0, 0]); + let key_capacity = 1; + + let result = CollectStructEvaluator::evaluate_non_windowed( + &mut token, + key_capacity, + &key_indices, + input, + 0, + max, + ) + .unwrap(); + let result = result.as_list::(); + + // build expected result 1 + let n_array = Int64Array::from(vec![Some(0), Some(0), Some(1), Some(1), Some(2)]); + let s_array = + StringArray::from(vec![Some("a"), Some("a"), Some("b"), Some("b"), Some("c")]); + let expected = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let offsets = ScalarBuffer::from(vec![0, 1, 3, 5]); + let expected = ListArray::new( + Arc::new(Field::new("item", expected.data_type().clone(), true)), + OffsetBuffer::new(offsets), + Arc::new(expected), + None, + ); + let expected = Arc::new(expected); + assert_eq!(expected.as_ref(), result); + + // Batch 2 + let n_array = Int64Array::from(vec![Some(3), Some(4), Some(5)]); + let s_array = StringArray::from(vec![Some("d"), Some("e"), Some("f")]); + let input = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let input = Arc::new(input); + + // New entity! 
+ let key_indices = UInt32Array::from(vec![1, 0, 1]); + let key_capacity = 2; + + let result = CollectStructEvaluator::evaluate_non_windowed( + &mut token, + key_capacity, + &key_indices, + input, + 0, + max, + ) + .unwrap(); + let result = result.as_list::(); + + // build expected result 2 + let n_array = Int64Array::from(vec![Some(3), Some(2), Some(4), Some(3), Some(5)]); + let s_array = + StringArray::from(vec![Some("d"), Some("c"), Some("e"), Some("d"), Some("f")]); + let expected = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let offsets = ScalarBuffer::from(vec![0, 1, 3, 5]); + let expected = ListArray::new( + Arc::new(Field::new("item", expected.data_type().clone(), true)), + OffsetBuffer::new(offsets), + Arc::new(expected), + None, + ); + let expected = Arc::new(expected); + + assert_eq!(expected.as_ref(), result); + } + + #[test] + fn test_basic_collect_multiple_batches_with_min() { + let min = 3; + let mut token = default_token(); + // Batch 1 + let n_array = Int64Array::from(vec![Some(0), Some(1), Some(2)]); + let s_array = StringArray::from(vec![Some("a"), Some("b"), Some("c")]); + let input = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let input = Arc::new(input); + + let key_indices = UInt32Array::from(vec![0, 0, 0]); + let key_capacity = 1; + + let result = CollectStructEvaluator::evaluate_non_windowed( + &mut token, + key_capacity, + &key_indices, + input, + min, + usize::MAX, + ) + .unwrap(); + let result = result.as_list::(); + + // build expected result 1 + let n_array = Int64Array::from(vec![Some(0), Some(1), Some(2)]); + let s_array = StringArray::from(vec![Some("a"), Some("b"), Some("c")]); + let expected = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let offsets = ScalarBuffer::from(vec![0, 3]); + let expected = ListArray::new( + Arc::new(Field::new("item", expected.data_type().clone(), true)), + OffsetBuffer::new(offsets), + Arc::new(expected), + None, + ); + let null_structs = new_null_array(expected.data_type(), 2); + let expected = Arc::new(expected); + let expected = arrow::compute::concat(&[null_structs.as_ref(), expected.as_ref()]).unwrap(); + assert_eq!(expected.as_ref(), result); + + // Batch 2 + let n_array = Int64Array::from(vec![Some(3), Some(4), Some(5)]); + let s_array = StringArray::from(vec![Some("d"), Some("e"), Some("f")]); + let input = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let input = Arc::new(input); + + // New entity! 
+ let key_indices = UInt32Array::from(vec![1, 0, 1]); + let key_capacity = 2; + + let result = CollectStructEvaluator::evaluate_non_windowed( + &mut token, + key_capacity, + &key_indices, + input, + min, + usize::MAX, + ) + .unwrap(); + let result = result.as_list::(); + + // build expected result 2 + let n_array = Int64Array::from(vec![Some(0), Some(1), Some(2), Some(4)]); + let s_array = StringArray::from(vec![Some("a"), Some("b"), Some("c"), Some("e")]); + let expected = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let offsets = ScalarBuffer::from(vec![0, 4]); + let expected = ListArray::new( + Arc::new(Field::new("item", expected.data_type().clone(), true)), + OffsetBuffer::new(offsets), + Arc::new(expected), + None, + ); + let null_structs = new_null_array(expected.data_type(), 1); + let expected = Arc::new(expected); + let expected = arrow::compute::concat(&[ + null_structs.as_ref(), + expected.as_ref(), + null_structs.as_ref(), + ]) + .unwrap(); + assert_eq!(expected.as_ref(), result); + } + + #[test] + fn test_trailing_collect() { + let min = 0; + let max = 10; + let duration = 6; + + let mut token = default_token(); + // Batch 1 + let n_array = Int64Array::from(vec![Some(0), Some(1), Some(2)]); + let s_array = StringArray::from(vec![Some("a"), Some("b"), Some("c")]); + let input = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let input = Arc::new(input); + + let key_indices = UInt32Array::from(vec![0, 0, 0]); + let key_capacity = 1; + + let input_times = TimestampNanosecondArray::from(vec![0, 5, 10]); + let input_times = Arc::new(input_times); + + let result = CollectStructEvaluator::evaluate_trailing_windowed( + &mut token, + key_capacity, + &key_indices, + input, + input_times, + duration, + min, + max, + ) + .unwrap(); + let result = result.as_list::(); + + // build expected result 1 + let n_array = Int64Array::from(vec![Some(0), Some(0), Some(1), Some(1), Some(2)]); + let s_array = + StringArray::from(vec![Some("a"), Some("a"), Some("b"), Some("b"), Some("c")]); + let expected = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let offsets = ScalarBuffer::from(vec![0, 1, 3, 5]); + let expected = ListArray::new( + Arc::new(Field::new("item", expected.data_type().clone(), true)), + OffsetBuffer::new(offsets), + Arc::new(expected), + None, + ); + let expected = Arc::new(expected); + assert_eq!(expected.as_ref(), result); + + // Batch 2 + let n_array = Int64Array::from(vec![Some(3), Some(4), Some(5)]); + let s_array = StringArray::from(vec![Some("d"), Some("e"), Some("f")]); + let input = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let input = Arc::new(input); + + // New entity! 
+ let key_indices = UInt32Array::from(vec![1, 0, 1]); + let key_capacity = 2; + + let input_times = TimestampNanosecondArray::from(vec![15, 20, 25]); + let input_times = Arc::new(input_times); + + let result = CollectStructEvaluator::evaluate_trailing_windowed( + &mut token, + key_capacity, + &key_indices, + input, + input_times, + duration, + min, + max, + ) + .unwrap(); + let result = result.as_list::(); + + // build expected result 2 + let n_array = Int64Array::from(vec![Some(3), Some(4), Some(5)]); + let s_array = StringArray::from(vec![Some("d"), Some("e"), Some("f")]); + let expected = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let offsets = ScalarBuffer::from(vec![0, 1, 2, 3]); + let expected = ListArray::new( + Arc::new(Field::new("item", expected.data_type().clone(), true)), + OffsetBuffer::new(offsets), + Arc::new(expected), + None, + ); + let expected = Arc::new(expected); + assert_eq!(expected.as_ref(), result); + } + + #[test] + fn test_ignores_null_inputs() { + let mut token = default_token(); + let fields = Fields::from(vec![ + Field::new("n", DataType::Int64, true), + Field::new("s", DataType::Utf8, true), + ]); + let field_builders: Vec> = vec![ + Box::new(Int64Builder::new()), + Box::new(StringBuilder::new()), + ]; + + // batch + let mut builder = StructBuilder::new(fields.clone(), field_builders); + builder + .field_builder::(0) + .unwrap() + .append_value(0); + builder + .field_builder::(1) + .unwrap() + .append_value("a"); + builder.append(true); + + builder + .field_builder::(0) + .unwrap() + .append_null(); + builder + .field_builder::(1) + .unwrap() + .append_null(); + builder.append(false); + + builder + .field_builder::(0) + .unwrap() + .append_value(1); + builder + .field_builder::(1) + .unwrap() + .append_value("b"); + builder.append(true); + + builder + .field_builder::(0) + .unwrap() + .append_value(2); + builder + .field_builder::(1) + .unwrap() + .append_value("c"); + builder.append(true); + + let input = builder.finish(); + let input = Arc::new(input); + + let key_indices = UInt32Array::from(vec![0, 0, 0, 0]); + let key_capacity = 1; + + let result = CollectStructEvaluator::evaluate_non_windowed( + &mut token, + key_capacity, + &key_indices, + input, + 0, + usize::MAX, + ) + .unwrap(); + let result = result.as_list::(); + + // build expected result + let n_array = Int64Array::from(vec![ + Some(0), + Some(0), + Some(0), + Some(1), + Some(0), + Some(1), + Some(2), + ]); + let s_array = StringArray::from(vec![ + Some("a"), + Some("a"), + Some("a"), + Some("b"), + Some("a"), + Some("b"), + Some("c"), + ]); + let expected = StructArray::new( + Fields::from(vec![ + Field::new("n", n_array.data_type().clone(), true), + Field::new("s", s_array.data_type().clone(), true), + ]), + vec![Arc::new(n_array), Arc::new(s_array)], + None, + ); + let offsets = ScalarBuffer::from(vec![0, 1, 2, 4, 7]); + let expected = ListArray::new( + Arc::new(Field::new("item", expected.data_type().clone(), true)), + OffsetBuffer::new(offsets), + Arc::new(expected), + None, + ); + let expected = Arc::new(expected); + assert_eq!(expected.as_ref(), result); + } +} diff --git a/crates/sparrow-instructions/src/evaluators/list/flatten.rs b/crates/sparrow-instructions/src/evaluators/list/flatten.rs new file mode 100644 index 000000000..8ab49570e --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/list/flatten.rs @@ 
-0,0 +1,70 @@
+use std::sync::Arc;
+
+use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo};
+
+use crate::ValueRef;
+use arrow::array::{Array, ArrayRef, AsArray, BufferBuilder, ListArray};
+use arrow::buffer::{OffsetBuffer, ScalarBuffer};
+use arrow_schema::{DataType, Field};
+
+/// Evaluator for the `flatten` instruction.
+#[derive(Debug)]
+pub struct FlattenEvaluator {
+    input: ValueRef,
+    field: Arc<Field>,
+}
+
+impl EvaluatorFactory for FlattenEvaluator {
+    fn try_new(info: StaticInfo<'_>) -> anyhow::Result<Box<dyn Evaluator>> {
+        let outer = info.args[0].data_type();
+        let inner = match outer {
+            DataType::List(field) => field.data_type(),
+            _ => {
+                anyhow::bail!("Input to flatten must be a list of lists, was {outer:?}.")
+            }
+        };
+        let field = match inner {
+            DataType::List(field) => field.clone(),
+            _ => {
+                anyhow::bail!("Input to flatten must be a list of lists, was {outer:?}.")
+            }
+        };
+
+        let input = info.unpack_argument()?;
+        Ok(Box::new(Self { input, field }))
+    }
+}
+
+impl Evaluator for FlattenEvaluator {
+    fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        let input = info.value(&self.input)?.array_ref()?;
+
+        let outer_list: &ListArray = input.as_list();
+        let inner_list: &ListArray = outer_list.values().as_list();
+
+        let len = outer_list.len();
+        let mut offsets = BufferBuilder::new(len + 1);
+
+        let mut outer_offsets = outer_list.offsets().iter().copied();
+        let inner_offsets = inner_list.offsets();
+        offsets.append(inner_offsets[outer_offsets.next().unwrap() as usize]);
+        for offset in outer_offsets {
+            // We know that the outer list contains the inner lists from [start..end).
+            // We also know that each inner list `i` contains the elements
+            // [inner_offset(i)..inner_offset(i + 1)).
+            // Thus each flattened row contains the elements
+            // [inner_offset(start)..inner_offset(end)), so we map each outer
+            // offset through `inner_offsets`.
+            offsets.append(inner_offsets[offset as usize]);
+        }
+        let offsets = offsets.finish();
+        let offsets = ScalarBuffer::new(offsets, 0, len + 1);
+        let offsets = OffsetBuffer::new(offsets);
+
+        let result = ListArray::new(
+            self.field.clone(),
+            offsets,
+            inner_list.values().clone(),
+            outer_list.nulls().cloned(),
+        );
+        Ok(Arc::new(result))
+    }
+}
diff --git a/crates/sparrow-instructions/src/evaluators/list/index.rs b/crates/sparrow-instructions/src/evaluators/list/index.rs
new file mode 100644
index 000000000..db9533e19
--- /dev/null
+++ b/crates/sparrow-instructions/src/evaluators/list/index.rs
@@ -0,0 +1,141 @@
+use anyhow::Context;
+use arrow::array::{Array, ArrayRef, AsArray, Int32Array, Int64Array, ListArray};
+
+use crate::ValueRef;
+use arrow_schema::DataType;
+use itertools::Itertools;
+use std::sync::Arc;
+
+use crate::{Evaluator, EvaluatorFactory, StaticInfo};
+
+/// Evaluator for `index` on lists.
+///
+/// Retrieves the value at the given index.
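+///
+/// For example (illustrative): indexing `[10, 8, 4]` with `0` yields `10`,
+/// while an out-of-bounds index or an empty list yields `null`.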
+#[derive(Debug)]
+pub(in crate::evaluators) struct IndexEvaluator {
+    index: ValueRef,
+    list: ValueRef,
+}
+
+impl EvaluatorFactory for IndexEvaluator {
+    fn try_new(info: StaticInfo<'_>) -> anyhow::Result<Box<dyn Evaluator>> {
+        let input_type = info.args[1].data_type.clone();
+        match input_type {
+            DataType::List(t) => anyhow::ensure!(t.data_type() == info.result_type),
+            other => anyhow::bail!("expected list type, saw {:?}", other),
+        };
+
+        let (index, list) = info.unpack_arguments()?;
+        Ok(Box::new(Self { index, list }))
+    }
+}
+
+impl Evaluator for IndexEvaluator {
+    fn evaluate(&mut self, info: &dyn crate::RuntimeInfo) -> anyhow::Result<ArrayRef> {
+        let list_input = info.value(&self.list)?.array_ref()?;
+        let index_input = info.value(&self.index)?.primitive_array()?;
+
+        let result = list_get(&list_input, &index_input)?;
+        Ok(result)
+    }
+}
+
+/// Given a `ListArray` and an `index` array of the same length, return an array of the values.
+fn list_get(list: &ArrayRef, indices: &Int64Array) -> anyhow::Result<ArrayRef> {
+    anyhow::ensure!(list.len() == indices.len());
+
+    let list = list.as_list();
+    let take_indices = list_indices(list, indices)?;
+    arrow::compute::take(list.values(), &take_indices, None).context("take in list_get")
+}
+
+/// Gets the indices into the flattened values where each list's value at the given index lives.
+fn list_indices(list: &ListArray, indices: &Int64Array) -> anyhow::Result<Int32Array> {
+    let offsets = list.offsets();
+
+    let mut result = Int32Array::builder(indices.len());
+    let offsets = offsets.iter().map(|n| *n as usize).tuple_windows();
+
+    'outer: for (index, (start, next)) in offsets.enumerate() {
+        let list_start = 0;
+        let list_end = next - start;
+        if indices.is_valid(index) {
+            // The inner index corresponds to the index within each list.
+            let inner_index = indices.value(index) as usize;
+            // The outer index corresponds to the index within the flattened array.
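+            // e.g. with offsets `[0, 3, 5]`, row 1 spans `[3, 5)`; an inner
+            // index of 1 maps to flattened index `3 + 1 = 4`.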
+ let outer_index = start + inner_index; + if inner_index >= list_start && inner_index < list_end { + result.append_value(outer_index as i32); + continue 'outer; + } + } + result.append_null(); + } + + Ok(result.finish()) +} + +#[cfg(test)] +mod tests { + use crate::evaluators::list::index::list_get; + use arrow::array::{ + as_boolean_array, as_primitive_array, as_string_array, ArrayRef, BooleanArray, + BooleanBuilder, Int32Array, Int32Builder, Int64Array, ListBuilder, StringArray, + StringBuilder, + }; + use std::sync::Arc; + + #[test] + fn test_index_primitive() { + let mut builder = ListBuilder::new(Int32Builder::new()); + builder.append_value([Some(1), Some(2), Some(3)]); + builder.append_value([]); + builder.append_value([None]); + builder.append_value([Some(10), Some(8), Some(4)]); + builder.append_value([Some(10), Some(15), Some(19), Some(123)]); + + let array: ArrayRef = Arc::new(builder.finish()); + + let indices = Int64Array::from(vec![0, 1, 2, 0, 1]); + let actual = list_get(&array, &indices).unwrap(); + let actual: &Int32Array = as_primitive_array(actual.as_ref()); + let expected = Int32Array::from(vec![Some(1), None, None, Some(10), Some(15)]); + assert_eq!(actual, &expected); + } + + #[test] + fn test_index_string() { + let mut builder = ListBuilder::new(StringBuilder::new()); + builder.append_value([Some("hello"), None, Some("world")]); + builder.append_value([Some("apple")]); + builder.append_value([None, Some("carrot")]); + builder.append_value([None, Some("dog"), Some("cat")]); + builder.append_value([Some("bird"), Some("fish")]); + + let array: ArrayRef = Arc::new(builder.finish()); + + let indices = Int64Array::from(vec![0, 1, 2, 0, 1]); + let actual = list_get(&array, &indices).unwrap(); + let actual: &StringArray = as_string_array(actual.as_ref()); + let expected = StringArray::from(vec![Some("hello"), None, None, None, Some("fish")]); + assert_eq!(actual, &expected); + } + + #[test] + fn test_index_boolean() { + let mut builder = ListBuilder::new(BooleanBuilder::new()); + builder.append_value([Some(true), None, Some(false)]); + builder.append_value([Some(false)]); + builder.append_value([None, Some(false)]); + builder.append_value([None, Some(true), Some(false)]); + builder.append_value([Some(true), Some(false)]); + + let array: ArrayRef = Arc::new(builder.finish()); + + let indices = Int64Array::from(vec![0, 1, 2, 0, 1]); + let actual = list_get(&array, &indices).unwrap(); + let actual: &BooleanArray = as_boolean_array(actual.as_ref()); + let expected = BooleanArray::from(vec![Some(true), None, None, None, Some(false)]); + assert_eq!(actual, &expected); + } +} diff --git a/crates/sparrow-instructions/src/evaluators/list/list_len.rs b/crates/sparrow-instructions/src/evaluators/list/list_len.rs new file mode 100644 index 000000000..d7cc23d59 --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/list/list_len.rs @@ -0,0 +1,36 @@ +use arrow::array::ArrayRef; + +use crate::ValueRef; +use arrow_schema::DataType; +use std::sync::Arc; + +use crate::{Evaluator, EvaluatorFactory, StaticInfo}; + +/// Evaluator for `len` on lists. +/// +/// Produces the length of the list. 
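+///
+/// For example (illustrative): `len([5, 6])` is `2`, `len([])` is `0`, and a
+/// null list produces `null`.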
+#[derive(Debug)] +pub(in crate::evaluators) struct ListLenEvaluator { + list: ValueRef, +} + +impl EvaluatorFactory for ListLenEvaluator { + fn try_new(info: StaticInfo<'_>) -> anyhow::Result<Box<dyn Evaluator>> { + let input_type = info.args[0].data_type.clone(); + match input_type { + DataType::List(_) => (), + other => anyhow::bail!("expected list type, saw {:?}", other), + }; + + let list = info.unpack_argument()?; + Ok(Box::new(Self { list })) + } +} + +impl Evaluator for ListLenEvaluator { + fn evaluate(&mut self, info: &dyn crate::RuntimeInfo) -> anyhow::Result<ArrayRef> { + let input = info.value(&self.list)?.array_ref()?; + let result = arrow::compute::kernels::length::length(input.as_ref())?; + Ok(result) + } +} diff --git a/crates/sparrow-instructions/src/evaluators/list/union.rs b/crates/sparrow-instructions/src/evaluators/list/union.rs new file mode 100644 index 000000000..995a4c0f2 --- /dev/null +++ b/crates/sparrow-instructions/src/evaluators/list/union.rs @@ -0,0 +1,186 @@ +use std::sync::Arc; + +use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; + +use crate::ValueRef; +use arrow::array::{Array, ArrayRef, AsArray, BufferBuilder, ListArray}; +use arrow::buffer::{OffsetBuffer, ScalarBuffer}; +use arrow::row::{RowConverter, SortField}; +use arrow_schema::{DataType, FieldRef}; +use hashbrown::HashSet; +use itertools::Itertools; + +/// Evaluator for the `union` instruction. +#[derive(Debug)] +pub struct UnionEvaluator { + a: ValueRef, + b: ValueRef, + field: FieldRef, + row_converter: RowConverter, +} + +impl EvaluatorFactory for UnionEvaluator { + fn try_new(info: StaticInfo<'_>) -> anyhow::Result<Box<dyn Evaluator>> { + anyhow::ensure!(info.args[0].data_type() == info.args[1].data_type()); + let DataType::List(field) = info.args[0].data_type() else { + anyhow::bail!( + "Unable to union non-list type {:?}", + info.args[0].data_type() + ) + }; + let field = field.clone(); + let row_converter = RowConverter::new(vec![SortField::new(field.data_type().clone())])?; + let (a, b) = info.unpack_arguments()?; + + Ok(Box::new(Self { + a, + b, + field, + row_converter, + })) + } +} + +impl Evaluator for UnionEvaluator { + fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> { + let a = info.value(&self.a)?.array_ref()?; + let b = info.value(&self.b)?.array_ref()?; + assert_eq!(a.len(), b.len()); + + union( + self.field.clone(), + &mut self.row_converter, + a.as_ref(), + b.as_ref(), + ) + } +} + +fn union( + field: FieldRef, + row_converter: &mut RowConverter, + a: &dyn Array, + b: &dyn Array, +) -> anyhow::Result<ArrayRef> { + let a_list: &ListArray = a.as_list(); + let b_list: &ListArray = b.as_list(); + + let mut offsets = BufferBuilder::new(a_list.len() + 1); + + let mut indices = Vec::with_capacity(a_list.values().len() + b_list.values().len()); + + let mut offset = 0u32; + offsets.append(offset); + + let mut included = HashSet::new(); + let a_offsets = a_list + .value_offsets() + .iter() + .map(|n| *n as usize) + .tuple_windows(); + let b_offsets = b_list + .value_offsets() + .iter() + .map(|n| *n as usize) + .tuple_windows(); + for (index, ((a_start, a_end), (b_start, b_end))) in a_offsets.zip(b_offsets).enumerate() { + let a_len = a_end - a_start; + let b_len = b_end - b_start; + + if a_len == 0 && b_len == 0 { + // Nothing to do + } else if a_len == 0 { + // Only need to take from b. + offset += b_len as u32; + indices.extend((b_start..b_end).map(|n| (1, n))); + } else if b_len == 0 { + // Only need to take from a.
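+ // Each `(array, index)` pair pushed here is an `interleave` index: array 0 + // selects from a's flattened values and array 1 from b's, so (as a sketch) + // [(0, 2), (1, 0)] would produce [a_values[2], b_values[0]].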
+ offset += a_len as u32; + indices.extend((a_start..a_end).map(|n| (0, n))); + } else { + let a_rows = row_converter.convert_columns(&[a_list.value(index)])?; + let b_rows = row_converter.convert_columns(&[b_list.value(index)])?; + + // INEFFICIENT: This currently copies the row into an owned vec to put + // in the hash set for deduplication. We'd likely be better off + // keeping the rows and using their identity to do the comparison. + // This would require figuring out a way to set up a map that *didn't* + // try to drop the rows (e.g., just stored references). + for (a_index, a_row) in a_rows.iter().enumerate() { + if included.insert(a_row.owned()) { + offset += 1; + indices.push((0, a_start + a_index)); + } + } + for (b_index, b_row) in b_rows.iter().enumerate() { + if included.insert(b_row.owned()) { + offset += 1; + indices.push((1, b_start + b_index)); + } + } + included.clear(); + } + offsets.append(offset); + } + + let values = arrow::compute::interleave( + &[a_list.values().as_ref(), b_list.values().as_ref()], + &indices, + )?; + + let offsets = offsets.finish(); + let offsets = ScalarBuffer::new(offsets, 0, a_list.len() + 1); + let offsets = OffsetBuffer::new(offsets); + let result = ListArray::new(field, offsets, values, None); + + Ok(Arc::new(result)) +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use arrow::array::{ArrayRef, Int32Array, ListBuilder}; + use arrow::row::{RowConverter, SortField}; + use arrow_schema::{DataType, Field}; + + #[test] + fn test_union_list() { + let field = Arc::new(Field::new("item", DataType::Int32, true)); + + let mut a = ListBuilder::new(Int32Array::builder(8)); + let mut b = ListBuilder::new(Int32Array::builder(8)); + let mut expected = ListBuilder::new(Int32Array::builder(12)); + + // 0: [ 5 ] + [] = [5] + a.append_value([Some(5)]); + b.append_value([]); + expected.append_value([Some(5)]); + + // 1: [ ] + [5, 6] = [5, 6] + a.append_value([]); + b.append_value([Some(5), Some(6)]); + expected.append_value([Some(5), Some(6)]); + + // 2: [] + null = [] + a.append_value([]); + b.append_null(); + expected.append_value([]); + + // 3: [6, 7] + [6, 7, 8] = [6, 7, 8] + a.append_value([Some(6), Some(7)]); + b.append_value([Some(6), Some(7), Some(8)]); + expected.append_value([Some(6), Some(7), Some(8)]); + + // 4: [7, null, 7, 9] + [8, 9, 10, 8] = [7, null, 9, 8, 10] + a.append_value([Some(7), None, Some(7), Some(9)]); + b.append_value([Some(8), Some(9), Some(10), Some(8)]); + expected.append_value([Some(7), None, Some(9), Some(8), Some(10)]); + + let mut row_converter = RowConverter::new(vec![SortField::new(DataType::Int32)]).unwrap(); + let actual = super::union(field, &mut row_converter, &a.finish(), &b.finish()).unwrap(); + + let expected: ArrayRef = Arc::new(expected.finish()); + assert_eq!(&actual, &expected); + } +} diff --git a/crates/sparrow-instructions/src/evaluators/logical.rs b/crates/sparrow-instructions/src/evaluators/logical.rs index 56eeffbd7..78c527ec1 100644 --- a/crates/sparrow-instructions/src/evaluators/logical.rs +++ b/crates/sparrow-instructions/src/evaluators/logical.rs @@ -1,9 +1,9 @@ use std::sync::Arc; +use crate::ValueRef; use arrow::array::{Array, ArrayData, ArrayRef, BooleanArray}; use arrow::buffer::bitwise_bin_op_helper; use arrow::datatypes::DataType; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; diff --git a/crates/sparrow-instructions/src/evaluators/macros.rs b/crates/sparrow-instructions/src/evaluators/macros.rs index f8c0621a2..f7f821872 100644 --- 
a/crates/sparrow-instructions/src/evaluators/macros.rs +++ b/crates/sparrow-instructions/src/evaluators/macros.rs @@ -213,16 +213,20 @@ macro_rules! create_ordered_evaluator { macro_rules! create_typed_evaluator { ($input_type:expr, $primitive_evaluator:ident, + $struct_evaluator:ident, + $list_evaluator:ident, $map_evaluator:ident, $bool_evaluator:ident, $string_evaluator:ident, $info:expr) => {{ use $crate::evaluators::macros::Identity; - create_typed_evaluator! {$input_type, $primitive_evaluator, $map_evaluator, $bool_evaluator, $string_evaluator, Identity, $info} + create_typed_evaluator! {$input_type, $primitive_evaluator, $struct_evaluator, $list_evaluator, $map_evaluator, $bool_evaluator, $string_evaluator, Identity, $info} }}; ($input_type:expr, $primitive_evaluator:ident, + $struct_evaluator:ident, + $list_evaluator:ident, $map_evaluator:ident, $bool_evaluator:ident, $string_evaluator:ident, @@ -289,6 +293,8 @@ macro_rules! create_typed_evaluator { Boolean => $bool_evaluator::try_new($info), Utf8 => $string_evaluator::try_new($info), Map(..) => $map_evaluator::try_new($info), + List(..) => $list_evaluator::try_new($info), + Struct(..) => $struct_evaluator::try_new($info), unsupported => { Err(anyhow::anyhow!(format!( "Unsupported type {:?} for {}", diff --git a/crates/sparrow-instructions/src/evaluators/map/get.rs b/crates/sparrow-instructions/src/evaluators/map/get.rs index b90e34f9f..c05c5ad19 100644 --- a/crates/sparrow-instructions/src/evaluators/map/get.rs +++ b/crates/sparrow-instructions/src/evaluators/map/get.rs @@ -1,3 +1,4 @@ +use crate::ValueRef; use anyhow::Context; use arrow::array::{ as_boolean_array, as_largestring_array, as_primitive_array, as_string_array, Array, @@ -7,7 +8,6 @@ use arrow::buffer::OffsetBuffer; use arrow::downcast_primitive_array; use arrow_schema::DataType; use itertools::Itertools; -use sparrow_plan::ValueRef; use std::sync::Arc; use crate::{Evaluator, EvaluatorFactory, StaticInfo}; @@ -284,7 +284,6 @@ mod tests { map.append(true).unwrap(); let map = map.finish(); - println!("MAP: {:?}", map); let keys = StringArray::from(vec![ Some("hello"), diff --git a/crates/sparrow-instructions/src/evaluators/math.rs b/crates/sparrow-instructions/src/evaluators/math.rs index f0a4f55c6..6a263d00c 100644 --- a/crates/sparrow-instructions/src/evaluators/math.rs +++ b/crates/sparrow-instructions/src/evaluators/math.rs @@ -1,10 +1,10 @@ use std::marker::PhantomData; use std::sync::Arc; +use crate::ValueRef; use arrow::array::ArrayRef; use arrow::datatypes::{ArrowNativeTypeOp, ArrowNumericType}; use sparrow_arrow::scalar_value::NativeFromScalar; -use sparrow_plan::ValueRef; mod clamp; mod exp; diff --git a/crates/sparrow-instructions/src/evaluators/math/clamp.rs b/crates/sparrow-instructions/src/evaluators/math/clamp.rs index 4ec1bedea..319bb915e 100644 --- a/crates/sparrow-instructions/src/evaluators/math/clamp.rs +++ b/crates/sparrow-instructions/src/evaluators/math/clamp.rs @@ -1,10 +1,10 @@ use std::marker::PhantomData; use std::sync::Arc; +use crate::ValueRef; use arrow::array::{ArrayRef, PrimitiveArray}; use arrow::datatypes::ArrowNumericType; use itertools::izip; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; diff --git a/crates/sparrow-instructions/src/evaluators/math/exp.rs b/crates/sparrow-instructions/src/evaluators/math/exp.rs index 950525122..98f2d58d7 100644 --- a/crates/sparrow-instructions/src/evaluators/math/exp.rs +++ b/crates/sparrow-instructions/src/evaluators/math/exp.rs @@ -2,11 +2,11 @@ use 
std::convert::Infallible; use std::marker::PhantomData; use std::sync::Arc; +use crate::ValueRef; use arrow::array::{ArrayRef, PrimitiveArray}; use arrow::compute::kernels::arity::unary; use arrow::datatypes::ArrowNumericType; use num::traits::Float; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; diff --git a/crates/sparrow-instructions/src/evaluators/math/floor_ceil.rs b/crates/sparrow-instructions/src/evaluators/math/floor_ceil.rs index 74ffe221e..39f93c325 100644 --- a/crates/sparrow-instructions/src/evaluators/math/floor_ceil.rs +++ b/crates/sparrow-instructions/src/evaluators/math/floor_ceil.rs @@ -1,11 +1,11 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::anyhow; use arrow::array::{ArrayRef, Float32Array, Float64Array}; use arrow::compute::kernels::arity::unary; use arrow::datatypes::DataType; use sparrow_arrow::downcast::downcast_primitive_array; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; diff --git a/crates/sparrow-instructions/src/evaluators/math/min_max.rs b/crates/sparrow-instructions/src/evaluators/math/min_max.rs index 7dbdcdb4d..949309693 100644 --- a/crates/sparrow-instructions/src/evaluators/math/min_max.rs +++ b/crates/sparrow-instructions/src/evaluators/math/min_max.rs @@ -1,10 +1,10 @@ use std::marker::PhantomData; use std::sync::Arc; +use crate::ValueRef; use arrow::array::{ArrayRef, PrimitiveArray}; use arrow::compute::math_op; use arrow::datatypes::{ArrowNativeTypeOp, ArrowNumericType}; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; diff --git a/crates/sparrow-instructions/src/evaluators/math/powf.rs b/crates/sparrow-instructions/src/evaluators/math/powf.rs index d97d87dda..b08d9cf0d 100644 --- a/crates/sparrow-instructions/src/evaluators/math/powf.rs +++ b/crates/sparrow-instructions/src/evaluators/math/powf.rs @@ -1,12 +1,12 @@ use std::marker::PhantomData; use std::sync::Arc; +use crate::ValueRef; use arrow::array::{ArrayRef, PrimitiveArray}; use arrow::compute::math_op; use arrow::datatypes::{ArrowNativeTypeOp, ArrowNumericType}; use num::traits::Pow; use sparrow_arrow::scalar_value::NativeFromScalar; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; diff --git a/crates/sparrow-instructions/src/evaluators/math/round.rs b/crates/sparrow-instructions/src/evaluators/math/round.rs index 3d3e5f230..03b110526 100644 --- a/crates/sparrow-instructions/src/evaluators/math/round.rs +++ b/crates/sparrow-instructions/src/evaluators/math/round.rs @@ -1,11 +1,11 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::anyhow; use arrow::array::{ArrayRef, Float32Array, Float64Array}; use arrow::compute::kernels::arity::unary; use arrow::datatypes::DataType; use sparrow_arrow::downcast::downcast_primitive_array; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; diff --git a/crates/sparrow-instructions/src/evaluators/record.rs b/crates/sparrow-instructions/src/evaluators/record.rs index a36d71a14..639728690 100644 --- a/crates/sparrow-instructions/src/evaluators/record.rs +++ b/crates/sparrow-instructions/src/evaluators/record.rs @@ -4,12 +4,12 @@ use std::sync::Arc; +use crate::ValueRef; use anyhow::anyhow; use arrow::array::ArrayRef; use arrow::datatypes::{DataType, FieldRef, Fields}; use itertools::Itertools; use sparrow_arrow::utils::make_struct_array; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, 
RuntimeInfo, StaticInfo}; diff --git a/crates/sparrow-instructions/src/evaluators/string.rs b/crates/sparrow-instructions/src/evaluators/string.rs index db892981b..9c0163663 100644 --- a/crates/sparrow-instructions/src/evaluators/string.rs +++ b/crates/sparrow-instructions/src/evaluators/string.rs @@ -1,9 +1,9 @@ use std::sync::Arc; +use crate::ValueRef; use arrow::array::ArrayRef; use arrow::datatypes::DataType; use sparrow_arrow::downcast::downcast_primitive_array; -use sparrow_plan::ValueRef; use crate::{Evaluator, EvaluatorFactory, RuntimeInfo, StaticInfo}; diff --git a/crates/sparrow-instructions/src/evaluators/time.rs b/crates/sparrow-instructions/src/evaluators/time.rs index bf5be9b45..0da139473 100644 --- a/crates/sparrow-instructions/src/evaluators/time.rs +++ b/crates/sparrow-instructions/src/evaluators/time.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use anyhow::anyhow; +use crate::ValueRef; use arrow::array::{ ArrayRef, Int32Array, IntervalDayTimeArray, IntervalYearMonthArray, TimestampNanosecondArray, UInt32Array, @@ -12,14 +12,9 @@ use arrow::datatypes::{ }; use arrow::temporal_conversions::timestamp_ns_to_datetime; use chrono::Datelike; -use serde::de::DeserializeOwned; -use serde::Serialize; -use sparrow_arrow::scalar_value::ScalarValue; -use sparrow_kernels::lag::LagPrimitive; -use sparrow_plan::ValueRef; use crate::evaluators::{Evaluator, RuntimeInfo}; -use crate::{EvaluatorFactory, StateToken, StaticInfo}; +use crate::{EvaluatorFactory, StaticInfo}; /// Evaluator for the `TimeOf` instruction. pub(super) struct TimeOfEvaluator {} @@ -400,60 +395,3 @@ impl EvaluatorFactory for MonthsBetweenEvaluator { Ok(Box::new(Self { time1, time2 })) } } - -/// Evaluator for the `Lag` instruction. -pub(super) struct PrimitiveLagEvaluator<T: ArrowPrimitiveType> { - input: ValueRef, - lag: usize, - state: LagPrimitive<T>, -} - -impl<T> Evaluator for PrimitiveLagEvaluator<T> -where - T: ArrowPrimitiveType, - T::Native: Serialize + DeserializeOwned + Copy, -{ - fn evaluate(&mut self, info: &dyn RuntimeInfo) -> anyhow::Result<ArrayRef> { - let grouping = info.grouping(); - self.state.execute( - grouping.num_groups(), - grouping.group_indices(), - &info.value(&self.input)?.array_ref()?, - self.lag, - ) - } - - fn state_token(&self) -> Option<&dyn StateToken> { - Some(&self.state) - } - - fn state_token_mut(&mut self) -> Option<&mut dyn StateToken> { - Some(&mut self.state) - } -} - -impl<T> EvaluatorFactory for PrimitiveLagEvaluator<T> -where - T: ArrowPrimitiveType, - T::Native: Serialize + DeserializeOwned + Copy, -{ - fn try_new(info: StaticInfo<'_>) -> anyhow::Result<Box<dyn Evaluator>> { - let (lag, input) = info.unpack_arguments()?; - let lag = lag - .literal_value() - .ok_or_else(|| anyhow!("Expected value of lag to be a literal, was {:?}", lag))?; - match lag { - ScalarValue::Int64(None) => Err(anyhow!("Unexpected `null` size for lag")), - ScalarValue::Int64(Some(lag)) if *lag <= 0 => { - Err(anyhow!("Unexpected value of lag ({}) -- must be > 0", lag)) - } - ScalarValue::Int64(Some(lag)) => { - let lag = *lag as usize; - // TODO: Pass the value of `lag` to the state.
- let state = LagPrimitive::new(); - Ok(Box::new(Self { input, lag, state })) - } - unexpected => anyhow::bail!("Unexpected literal {:?} for lag", unexpected), - } - } -} diff --git a/crates/sparrow-plan/src/ids.rs b/crates/sparrow-instructions/src/ids.rs similarity index 100% rename from crates/sparrow-plan/src/ids.rs rename to crates/sparrow-instructions/src/ids.rs diff --git a/crates/sparrow-plan/src/inst.rs b/crates/sparrow-instructions/src/inst.rs similarity index 69% rename from crates/sparrow-plan/src/inst.rs rename to crates/sparrow-instructions/src/inst.rs index 5bf2e9fb4..3fa209b5a 100644 --- a/crates/sparrow-plan/src/inst.rs +++ b/crates/sparrow-instructions/src/inst.rs @@ -1,4 +1,5 @@ use std::fmt; +use std::hash::Hash; use std::sync::Arc; use arrow::datatypes::DataType; @@ -8,15 +9,7 @@ use sparrow_syntax::{FeatureSetPart, FenlType, Signature}; use static_init::dynamic; use strum::EnumProperty; -use crate::value::ValueRef; - -/// The mode an instruction is being used in. -/// -/// Affects which signature is used. -pub enum Mode { - Dfg, - Plan, -} +use crate::{value::ValueRef, Udf}; /// Enumeration of the instruction operations. /// @@ -39,6 +32,7 @@ pub enum Mode { strum_macros::EnumIter, strum_macros::EnumProperty, strum_macros::IntoStaticStr, + strum_macros::EnumDiscriminants, Eq, FromStr, Hash, @@ -61,8 +55,11 @@ pub enum InstOp { #[strum(props(signature = "coalesce(values+: T) -> T"))] Coalesce, #[strum(props( - dfg_signature = "count_if(input: T, window: window = null) -> u32", - plan_signature = "count_if(input: T, ticks: bool = null, slide_duration: i64 = null) -> \ + signature = "collect(input: T, const max: i64, const min: i64 = 0, ticks: bool = null, slide_duration: i64 = null) -> list<T>" + ))] + Collect, + #[strum(props( + signature = "count_if(input: T, ticks: bool = null, slide_duration: i64 = null) -> \ u32" ))] CountIf, @@ -87,10 +84,11 @@ pub enum InstOp { #[strum(props(signature = "exp(power: f64) -> f64"))] Exp, #[strum(props( - dfg_signature = "first(input: T, window: window = null) -> T", - plan_signature = "first(input: T, ticks: bool = null, slide_duration: i64 = null) -> T" + signature = "first(input: T, ticks: bool = null, slide_duration: i64 = null) -> T" ))] First, + #[strum(props(signature = "flatten(input: list<list<T>>) -> list<T>"))] + Flatten, #[strum(props(signature = "floor(n: N) -> N"))] Floor, #[strum(props(signature = "get(key: K, map: map<K, V>) -> V"))] Get, @@ -103,6 +101,8 @@ Hash, #[strum(props(signature = "if(condition: bool, value: T) -> T"))] If, + #[strum(props(signature = "index(i: i64, list: list<T>) -> T"))] + Index, #[strum(props(signature = "is_valid(input: T) -> bool"))] IsValid, // HACK: This instruction does not show up in the plan/does not have an evaluator.
@@ -111,15 +111,14 @@ pub enum InstOp { Json, #[strum(props(signature = "json_field(s: string, field: string) -> string"))] JsonField, - #[strum(props(signature = "lag(n: i64, input: O) -> O"))] - Lag, #[strum(props( - dfg_signature = "last(input: T, window: window = null) -> T", - plan_signature = "last(input: T, ticks: bool = null, slide_duration: i64 = null) -> T" + signature = "last(input: T, ticks: bool = null, slide_duration: i64 = null) -> T" ))] Last, #[strum(props(signature = "len(s: string) -> i32"))] Len, + #[strum(props(signature = "list_len(input: list<T>) -> i32"))] + ListLen, #[strum(props(signature = "logical_and(a: bool, b: bool) -> bool"))] LogicalAnd, #[strum(props(signature = "logical_or(a: bool, b: bool) -> bool"))] LogicalOr, @@ -131,19 +130,16 @@ pub enum InstOp { #[strum(props(signature = "lte(a: O, b: O) -> bool"))] Lte, #[strum(props( - dfg_signature = "max(input: O, window: window = null) -> O", - plan_signature = "max(input: O, ticks: bool = null, slide_duration: i64 = null) -> O" + signature = "max(input: O, ticks: bool = null, slide_duration: i64 = null) -> O" ))] Max, #[strum(props( - dfg_signature = "mean(input: N, window: window = null) -> f64", - plan_signature = "mean(input: N, ticks: bool = null, slide_duration: i64 = null) -> \ + signature = "mean(input: N, ticks: bool = null, slide_duration: i64 = null) -> \ f64" ))] Mean, #[strum(props( - dfg_signature = "min(input: O, window: window = null) -> O", - plan_signature = "min(input: O, ticks: bool = null, slide_duration: i64 = null) -> O" + signature = "min(input: O, ticks: bool = null, slide_duration: i64 = null) -> O" ))] Min, #[strum(props(signature = "month_of_year(time: timestamp_ns) -> u32"))] @@ -183,8 +179,7 @@ pub enum InstOp { ))] Substring, #[strum(props( - dfg_signature = "sum(input: N, window: window = null) -> N", - plan_signature = "sum(input: N, ticks: bool = null, slide_duration: i64 = null) -> \ + signature = "sum(input: N, ticks: bool = null, slide_duration: i64 = null) -> \ N" ))] Sum, @@ -192,9 +187,10 @@ pub enum InstOp { TimeOf, #[strum(props(signature = "upper(s: string) -> string"))] Upper, + #[strum(props(signature = "union(a: list<T>, b: list<T>) -> list<T>"))] + Union, #[strum(props( - dfg_signature = "variance(input: N, window: window = null) -> f64", - plan_signature = "variance(input: N, ticks: bool = null, slide_duration: i64 = null) \ + signature = "variance(input: N, ticks: bool = null, slide_duration: i64 = null) \ -> f64" ))] Variance, @@ -215,19 +211,17 @@ impl InstOp { ) } - pub fn signature(&self, mode: Mode) -> &'static Signature { - match mode { - Mode::Dfg => INST_OP_SIGNATURES[*self].dfg(), - Mode::Plan => INST_OP_SIGNATURES[*self].plan(), - } + pub fn signature(&self) -> &'static Signature { + &INST_OP_SIGNATURES[*self] } pub fn name(&self) -> &'static str { - self.signature(Mode::Dfg).name() + self.signature().name() } } -#[derive(Clone, Debug, PartialEq, Hash, Eq, Ord, PartialOrd)] +// #[derive(Clone, Debug, PartialEq, Hash, Eq, Ord, PartialOrd)] +#[derive(Debug, Eq, Ord, PartialOrd)] pub enum InstKind { /// Applies a callable function to the inputs. Simple(InstOp), @@ -239,6 +233,46 @@ pub enum InstKind { /// /// The number of arguments should match the number of fields. Record, + /// A user-defined function.
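+ /// + /// Carries an `Arc<dyn Udf>`; equality, hashing, and ordering for this + /// variant are delegated to the `dyn Udf` impls (by signature, and by + /// name then UUID for ordering).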
+ Udf(Arc<dyn Udf>), +} + +impl Clone for InstKind { + fn clone(&self) -> Self { + match self { + Self::Simple(arg0) => Self::Simple(*arg0), + Self::FieldRef => Self::FieldRef, + Self::Cast(arg0) => Self::Cast(arg0.clone()), + Self::Record => Self::Record, + Self::Udf(arg0) => Self::Udf(arg0.clone()), + } + } +} + +impl PartialEq for InstKind { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Simple(l0), Self::Simple(r0)) => l0 == r0, + (Self::FieldRef, Self::FieldRef) => true, + (Self::Cast(l0), Self::Cast(r0)) => l0 == r0, + (Self::Record, Self::Record) => true, + (Self::Udf(l0), Self::Udf(r0)) => l0 == r0, + _ => false, + } + } +} + +impl Hash for InstKind { + fn hash<H: std::hash::Hasher>(&self, state: &mut H) { + core::mem::discriminant(self).hash(state); + match self { + InstKind::Udf(udf) => udf.hash(state), + InstKind::Simple(op) => op.hash(state), + InstKind::Cast(dt) => dt.hash(state), + InstKind::Record => {} + InstKind::FieldRef => {} + } + } } impl fmt::Display for InstKind { @@ -248,6 +282,7 @@ impl fmt::Display for InstKind { InstKind::FieldRef => write!(f, "field"), InstKind::Cast(data_type) => write!(f, "cast:{data_type}"), InstKind::Record => write!(f, "record"), + InstKind::Udf(udf) => write!(f, "{}", udf.signature().name()), } } } @@ -264,50 +299,19 @@ impl Inst { } } -struct OpSignatures { - dfg: Arc<Signature>, - plan: Arc<Signature>, -} - -impl OpSignatures { - pub fn dfg(&self) -> &Signature { - self.dfg.as_ref() - } - - pub fn plan(&self) -> &Signature { - self.plan.as_ref() - } -} - #[dynamic] -static INST_OP_SIGNATURES: EnumMap<InstOp, OpSignatures> = { +static INST_OP_SIGNATURES: EnumMap<InstOp, Arc<Signature>> = { enum_map! { op => { let signature = parse_signature(op, "signature"); - let dfg_signature = parse_signature(op, "dfg_signature"); - let plan_signature = parse_signature(op, "plan_signature"); - - let signatures = match (signature, dfg_signature, plan_signature) { - (None, Some(dfg), Some(plan)) => { - assert_eq!(dfg.name(), plan.name(), "Names for both DFG and Plan signature must be the same"); - OpSignatures { dfg, plan } - }, - (Some(shared), None, None) => { - OpSignatures { - dfg: shared.clone(), - plan: shared - } - } - (shared, dfg, plan) => { - // Must have either (shared) or (dfg and plan). - panic!("Missing or invalid signatures for instruction {op:?}: shared={shared:?}, dfg={dfg:?}, plan={plan:?}") - } + let signature = match signature { + Some(s) => s, + None => panic!("Missing or invalid signature for instruction {op:?}") }; - for parameter in signatures.plan().parameters().types() { + for parameter in signature.parameters().types() { if matches!(parameter.inner(), FenlType::Window) { - panic!("Illegal type '{}' in plan_signature for instruction {:?}", parameter.inner(), op) - + panic!("Illegal type '{}' in signature for instruction {:?}", parameter.inner(), op) } } @@ -316,10 +320,10 @@ static INST_OP_SIGNATURES: EnumMap<InstOp, OpSignatures> = { // make sure we don't accidentally introduce `InstOp` with // these names (since they are handled specially in the // planning / execution). 
- assert_ne!(signatures.dfg().name(), "record", "'record' is a reserved instruction name"); - assert_ne!(signatures.dfg().name(), "field_ref", "'field_ref' is a reserved instruction name"); + assert_ne!(signature.name(), "record", "'record' is a reserved instruction name"); + assert_ne!(signature.name(), "field_ref", "'field_ref' is a reserved instruction name"); - signatures + signature } } }; diff --git a/crates/sparrow-instructions/src/lib.rs b/crates/sparrow-instructions/src/lib.rs index 09372bd1a..89dfef5f1 100644 --- a/crates/sparrow-instructions/src/lib.rs +++ b/crates/sparrow-instructions/src/lib.rs @@ -13,13 +13,21 @@ mod columnar_value; mod compute_store; pub mod evaluators; mod grouping; +mod ids; +mod inst; mod state; mod store_key; +mod udf; +mod value; pub use aggregation_args::*; pub use columnar_value::*; pub use compute_store::*; pub use evaluators::*; pub use grouping::*; +pub use ids::*; +pub use inst::*; pub use state::*; pub use store_key::*; +pub use udf::*; +pub use value::*; diff --git a/crates/sparrow-instructions/src/udf.rs b/crates/sparrow-instructions/src/udf.rs new file mode 100644 index 000000000..09bb8d5a1 --- /dev/null +++ b/crates/sparrow-instructions/src/udf.rs @@ -0,0 +1,41 @@ +use sparrow_syntax::Signature; +use std::fmt::Debug; +use std::hash::Hash; + +use crate::{Evaluator, StaticInfo}; + +/// Defines the interface for user-defined functions. +pub trait Udf: Send + Sync + Debug { + fn signature(&self) -> &Signature; + fn make_evaluator(&self, static_info: StaticInfo<'_>) -> Box<dyn Evaluator>; + fn uuid(&self) -> &uuid::Uuid; +} + +impl PartialOrd for dyn Udf { + fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { + Some(self.cmp(other)) + } +} + +impl Ord for dyn Udf { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.signature() + .name() + .cmp(other.signature().name()) + .then(self.uuid().cmp(other.uuid())) + } +} + +impl Hash for dyn Udf { + fn hash<H: std::hash::Hasher>(&self, state: &mut H) { + self.signature().hash(state); + } +} + +impl PartialEq for dyn Udf { + fn eq(&self, other: &Self) -> bool { + self.signature() == other.signature() + } +} + +impl Eq for dyn Udf {} diff --git a/crates/sparrow-plan/src/value.rs b/crates/sparrow-instructions/src/value.rs similarity index 100% rename from crates/sparrow-plan/src/value.rs rename to crates/sparrow-instructions/src/value.rs diff --git a/crates/sparrow-kernels/src/lag.rs b/crates/sparrow-kernels/src/lag.rs deleted file mode 100644 index a8158ab62..000000000 --- a/crates/sparrow-kernels/src/lag.rs +++ /dev/null @@ -1,159 +0,0 @@ -//! Defines the kernels for `lag`. -//! -//! The `lag` operation is stateful, like an aggregation. However, if the input -//! is discrete, the output is still discrete. Specifically, it doesn't change -//! the continuity of the input. -//! -//! We currently only implement `lag` for numeric types, since we anticipate -//! those being most useful. Other types (booleans, strings, structs) can be -//! implemented as needed. -//! -//! # TODO Buffer Sharing -//! -//! Currently, no optimization is done to share the `lag` buffer. Specifically, -//! if a query contains both `lag(e, 1)` and `lag(e, 2)` we could satisfy that -//! with a single `lag` buffer holding the 2 preceding outputs. Doing this -//! optimization is relatively straightforward -- collect all `lag` operations -//! for each expression and either (1) rewrite them to something like -//! `lag_buffer(e, max(N))` and `lag_extract(lag_buffer(e, max(N)), n)` where -//! the first manages the buffer and the second extracts from it or (2) -//! 
configure the plan with some `lag` information similar to ticks. Option (1) -//! reuses machinery, but would run into some weirdness since it would need to -//! produce a *value*, or bypass the standard instruction behavior. -//! -//! Another option would be to combine them into something like `lag(e, 1, 2, 5, -//! 7)` to indicate the positions required. Then produce a struct containing -//! four columns. So: -//! -//! ```no_run -//! { a: lag(e, 1), b: lag(e, 2), c: lag(e, 5), d: lag(e, 7) } -//! ``` -//! -//! Could be compiled to -//! -//! ``` -//! let lag_buffer = lag(e, 1, 2, 5, 7) -//! in { a: lag_buffer.1, b: lag_buffer.2, c: lag_buffer.5, d: lag_buffer.7 } -//! ``` -//! -//! Where the input has *all* required inputs and the output puts them as fields -//! of a struct. - -use std::collections::VecDeque; -use std::sync::Arc; - -use arrow::array::{Array, ArrayRef, PrimitiveArray, UInt32Array}; -use arrow::datatypes::ArrowPrimitiveType; -use itertools::izip; -use sparrow_arrow::downcast::downcast_primitive_array; - -pub struct LagPrimitive<T: ArrowPrimitiveType> { - pub state: Vec<VecDeque<T::Native>>, -} - -impl<T: ArrowPrimitiveType> LagPrimitive<T> { - pub fn new() -> Self { - Self { - state: Vec::default(), - } - } -} - -impl<T: ArrowPrimitiveType> Default for LagPrimitive<T> { - fn default() -> Self { - Self::new() - } -} - -impl<T: ArrowPrimitiveType> std::fmt::Debug for LagPrimitive<T> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("LagPrimitive") - .field("state", &self.state) - .finish() - } -} - -impl<T> LagPrimitive<T> -where - T: ArrowPrimitiveType, - T::Native: Copy, -{ - #[inline] - fn execute_one( - &mut self, - entity_index: u32, - input_is_valid: bool, - input: T::Native, - lag: usize, - ) -> Option<T::Native> { - let entity_index = entity_index as usize; - let queue = &mut self.state[entity_index]; - - // We don't put the current value in until after we get it, so `lag(1)` is in - // position 0, `lag(2)` is in position 1, etc. - let result = if queue.len() == lag { - queue.front().copied() - } else { - None - }; - - if input_is_valid { - if queue.len() == lag { - queue.pop_front(); - } - queue.push_back(input); - }; - - result - } - - fn ensure_entity_capacity(&mut self, key_capacity: usize, n: usize) { - if self.state.len() < key_capacity { - self.state.resize(key_capacity, VecDeque::with_capacity(n)) - } - } - - /// Update the lag state with the given inputs and return the - /// lagging value. - /// - /// The `key_capacity` must be greater than all values in the - /// `entity_indices`. - pub fn execute( - &mut self, - key_capacity: usize, - entity_indices: &UInt32Array, - input: &ArrayRef, - lag: usize, - ) -> anyhow::Result<ArrayRef> { - assert_eq!(entity_indices.len(), input.len()); - - // Make sure the internal buffers are large enough for the accumulators we may - // want to store. - self.ensure_entity_capacity(key_capacity, lag); - - let input = downcast_primitive_array::<T>(input.as_ref())?; - - // TODO: Handle the case where the input is empty (null_count == len) and we - // don't need to compute anything. - - let result: PrimitiveArray<T> = if let Some(is_valid) = input.nulls() { - let iter = izip!(is_valid.iter(), entity_indices.values(), input.values()).map( - |(is_valid, entity_index, input)| { - self.execute_one(*entity_index, is_valid, *input, lag) - }, - ); - - // SAFETY: `izip!` and map are trusted length iterators. - unsafe { PrimitiveArray::from_trusted_len_iter(iter) } - } else { - // Handle the case where input contains no nulls. This allows us to - // use `prim_input.values()` instead of `prim_input.iter()`. 
- let iter = izip!(entity_indices.values(), input.values()) - .map(|(entity_index, input)| self.execute_one(*entity_index, true, *input, lag)); - - // SAFETY: `izip!` and `map` are trusted length iterators. - unsafe { PrimitiveArray::from_trusted_len_iter(iter) } - }; - Ok(Arc::new(result)) - } -} diff --git a/crates/sparrow-kernels/src/lib.rs b/crates/sparrow-kernels/src/lib.rs index 63faaaafe..b12d59bbe 100644 --- a/crates/sparrow-kernels/src/lib.rs +++ b/crates/sparrow-kernels/src/lib.rs @@ -9,7 +9,6 @@ clippy::print_stderr, clippy::undocumented_unsafe_blocks )] -pub mod lag; mod ordered_cast; pub mod string; pub mod time; diff --git a/crates/sparrow-main/Cargo.toml b/crates/sparrow-main/Cargo.toml index c84d0323d..ebdcd0b3e 100644 --- a/crates/sparrow-main/Cargo.toml +++ b/crates/sparrow-main/Cargo.toml @@ -38,7 +38,6 @@ sparrow-core = { path = "../sparrow-core" } sparrow-instructions = { path = "../sparrow-instructions" } sparrow-kernels = { path = "../sparrow-kernels" } sparrow-materialize = { path = "../sparrow-materialize" } -sparrow-plan = { path = "../sparrow-plan" } sparrow-qfr = { path = "../sparrow-qfr" } sparrow-runtime = { path = "../sparrow-runtime" } sparrow-syntax = { path = "../sparrow-syntax" } diff --git a/crates/sparrow-main/src/serve.rs b/crates/sparrow-main/src/serve.rs index 90dc4845d..ac7aa10e7 100644 --- a/crates/sparrow-main/src/serve.rs +++ b/crates/sparrow-main/src/serve.rs @@ -88,9 +88,7 @@ impl ServeCommand { let reflection_service = tonic_reflection::server::Builder::configure() .register_encoded_file_descriptor_set(sparrow_api::FILE_DESCRIPTOR_SET) - .register_encoded_file_descriptor_set( - tonic_health::proto::GRPC_HEALTH_V1_FILE_DESCRIPTOR_SET, - ) + .register_encoded_file_descriptor_set(tonic_health::pb::FILE_DESCRIPTOR_SET) .build() .unwrap(); diff --git a/crates/sparrow-main/src/serve/compute_service.rs b/crates/sparrow-main/src/serve/compute_service.rs index 2169b3dcf..3b8a91930 100644 --- a/crates/sparrow-main/src/serve/compute_service.rs +++ b/crates/sparrow-main/src/serve/compute_service.rs @@ -20,6 +20,7 @@ use sparrow_instructions::ComputeStore; use sparrow_materialize::{Materialization, MaterializationControl}; use sparrow_qfr::kaskada::sparrow::v1alpha::{flight_record_header, FlightRecordHeader}; use sparrow_runtime::execute::error::Error; +use sparrow_runtime::execute::output::Destination; use sparrow_runtime::stores::{ObjectStoreRegistry, ObjectStoreUrl}; use tempfile::NamedTempFile; @@ -301,7 +302,8 @@ fn start_materialization_impl( let destination = request .destination .ok_or(Error::MissingField("destination"))?; - + let destination = + Destination::try_from(destination).change_context(Error::InvalidDestination)?; let materialization = Materialization::new(id, plan, tables, destination); // TODO: Support lateness // Spawns the materialization thread and begins execution @@ -792,7 +794,7 @@ mod tests { let parquet_metadata = reader.metadata(); let parquet_metadata = parquet_metadata.file_metadata(); - assert_eq!(parquet_metadata.num_rows(), 54_068); + assert_eq!(parquet_metadata.num_rows(), 49_540); assert_eq!(parquet_metadata.schema_descr().num_columns(), 11); // Second, redact the output paths and check the response. 
diff --git a/crates/sparrow-main/src/serve/preparation_service.rs b/crates/sparrow-main/src/serve/preparation_service.rs index 5523b0128..fc07f94ac 100644 --- a/crates/sparrow-main/src/serve/preparation_service.rs +++ b/crates/sparrow-main/src/serve/preparation_service.rs @@ -12,7 +12,7 @@ use tonic::Response; use crate::IntoStatus; // The current preparation ID of the data preparation service -const CURRENT_PREP_ID: i32 = 6; +const CURRENT_PREP_ID: i32 = 7; #[derive(Debug)] pub(super) struct PreparationServiceImpl { diff --git a/crates/sparrow-main/src/serve/snapshots/sparrow_main__serve__compute_service__tests__sliced_query_smoke.snap b/crates/sparrow-main/src/serve/snapshots/sparrow_main__serve__compute_service__tests__sliced_query_smoke.snap index 421342a71..cbdf08b06 100644 --- a/crates/sparrow-main/src/serve/snapshots/sparrow_main__serve__compute_service__tests__sliced_query_smoke.snap +++ b/crates/sparrow-main/src/serve/snapshots/sparrow_main__serve__compute_service__tests__sliced_query_smoke.snap @@ -5,14 +5,14 @@ expression: results - state: 2 is_query_done: true progress: - total_input_rows: 54068 - processed_input_rows: 54068 + total_input_rows: 49540 + processed_input_rows: 49540 buffered_rows: 0 processed_buffered_rows: 0 min_event_time: 0 max_event_time: 0 output_time: 0 - produced_output_rows: 54068 + produced_output_rows: 49540 flight_record_path: ~ plan_yaml_path: ~ compute_snapshots: [] diff --git a/crates/sparrow-main/tests/e2e/aggregation_tests.rs b/crates/sparrow-main/tests/e2e/aggregation_tests.rs index 487574210..73469f06c 100644 --- a/crates/sparrow-main/tests/e2e/aggregation_tests.rs +++ b/crates/sparrow-main/tests/e2e/aggregation_tests.rs @@ -13,8 +13,8 @@ use crate::QueryFixture; async fn test_sum_i64_final() { insta::assert_snapshot!(QueryFixture::new("{ sum_field: sum(Numbers.m) }").with_final_results().run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_field - 1996-12-20T00:40:02.000000001,18446744073709551615,3650215962958587783,A,34 - 1996-12-20T00:40:02.000000001,18446744073709551615,11753611437813598533,B,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,2867199309159137213,B,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,12960666915911099378,A,34 "###); } @@ -22,12 +22,12 @@ async fn test_sum_i64_final() { async fn test_sum_since_tick_i64() { insta::assert_snapshot!(QueryFixture::new("{ sum_field: sum(Numbers.m, window=since(daily())) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_field - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,22 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,22 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,34 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,34 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,22 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,22 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,34 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,34 "###); } @@ -35,48 +35,48 @@ async fn 
test_sum_since_tick_i64() { async fn test_since_tick_with_pipe_sum() { insta::assert_snapshot!(QueryFixture::new("{ sum_field: sum(Numbers.m, window=since( (Numbers.m | (daily() or $input > 10)) ) ) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_field - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,22 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,22 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } #[tokio::test] async fn test_since_predicate_sum_i64() { insta::assert_snapshot!(QueryFixture::new("{ sum_field: sum(Numbers.m, window=since(Numbers.n > 7)) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_field - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,12 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,12 "###); } #[tokio::test] async fn test_sliding_tick_sum_i64() { insta::assert_snapshot!(QueryFixture::new("{ sum_field: sum(Numbers.m, window=sliding(2, daily())) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_field - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,22 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,22 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,34 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,34 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,22 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,22 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,34 + 
1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,34 "###); } #[tokio::test] async fn test_sliding_predicate_sum_i64() { insta::assert_snapshot!(QueryFixture::new("{ sum_field: sum(Numbers.m, window=sliding(2, Numbers.m > 1)) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_field - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,22 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,29 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,12 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,22 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,29 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,12 "###); } @@ -84,12 +84,12 @@ async fn test_sliding_predicate_sum_i64() { async fn test_nested_sum_i64() { insta::assert_snapshot!(QueryFixture::new("{ sum: sum(sum(Numbers.m))}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,27 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,49 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,83 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,117 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,27 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,49 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,83 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,117 "###); } @@ -97,12 +97,12 @@ async fn test_nested_sum_i64() { async fn test_sum_i64() { insta::assert_snapshot!(QueryFixture::new("{ sum: sum(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,22 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,22 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,34 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,34 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,22 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,22 + 
1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,34 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,34 "###); } @@ -110,12 +110,12 @@ async fn test_sum_i64() { async fn test_sum_f64() { insta::assert_snapshot!(QueryFixture::new("{ sum: sum(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,22.8 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,22.8 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,35.2 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,35.2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,22.8 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,22.8 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,35.2 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,35.2 "###); } @@ -123,12 +123,12 @@ async fn test_sum_f64() { async fn test_mean_i64() { insta::assert_snapshot!(QueryFixture::new("{ mean: mean(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,mean - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.0 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,11.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,11.0 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,11.333333333333334 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,11.333333333333334 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.0 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,11.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,11.0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,11.333333333333334 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,11.333333333333334 "###); } @@ -136,12 +136,12 @@ async fn test_mean_i64() { async fn test_mean_f64() { insta::assert_snapshot!(QueryFixture::new("{ mean: mean(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,mean - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,11.400000000000002 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,11.400000000000002 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,11.733333333333334 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,11.733333333333334 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3 + 
1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,11.400000000000002 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,11.400000000000002 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,11.733333333333334 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,11.733333333333334 "###); } @@ -149,12 +149,12 @@ async fn test_mean_f64() { async fn test_variance_i64() { insta::assert_snapshot!(QueryFixture::new("{ variance: variance(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,variance - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,36.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,36.0 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,24.222222222222225 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,24.222222222222225 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,36.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,36.0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,24.222222222222225 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,24.222222222222225 "###); } @@ -162,12 +162,12 @@ async fn test_variance_i64() { async fn test_variance_f64() { insta::assert_snapshot!(QueryFixture::new("{ variance: variance(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,variance - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,38.440000000000005 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,38.440000000000005 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,25.848888888888894 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,25.848888888888894 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,38.440000000000005 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,38.440000000000005 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,25.848888888888894 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,25.848888888888894 "###); } @@ -175,12 +175,12 @@ async fn test_variance_f64() { async fn test_stddev_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, stddev: stddev(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,stddev - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,6.0 - 
1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,4.921607686744467 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,4.921607686744467 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,6.0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,4.921607686744467 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,4.921607686744467 "###); } @@ -188,12 +188,12 @@ async fn test_stddev_i64() { async fn test_stddev_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, stddev: stddev(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,stddev - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,6.2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,5.0841802573166985 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,5.0841802573166985 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,6.2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,5.0841802573166985 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,5.0841802573166985 "###); } @@ -201,12 +201,12 @@ async fn test_stddev_f64() { async fn test_min_f64() { insta::assert_snapshot!(QueryFixture::new("{ min: min(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,min - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,5.2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,5.2 "###); } @@ -214,12 +214,12 @@ async fn test_min_f64() { async fn test_min_i64() { insta::assert_snapshot!(QueryFixture::new("{ min: min(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,min - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 
1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,5 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,5 "###); } @@ -227,12 +227,12 @@ async fn test_min_i64() { async fn test_min_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ min: min(Times.n)}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,min - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,4 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,4 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,4 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,4 "###); } @@ -240,12 +240,12 @@ async fn test_min_timestamp_ns() { async fn test_max_f64() { insta::assert_snapshot!(QueryFixture::new("{ max: max(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,max - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,17.6 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,17.6 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,17.6 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,17.6 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,17.6 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,17.6 "###); } @@ -253,12 +253,12 @@ async fn test_max_f64() { async fn test_max_i64() { insta::assert_snapshot!(QueryFixture::new("{ max: max(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,max - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 
1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,17 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,17 "###); } @@ -266,12 +266,12 @@ async fn test_max_i64() { async fn test_max_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ max: max(Times.n)}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,max - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23 "###); } @@ -279,12 +279,12 @@ async fn test_max_timestamp_ns() { async fn test_count_i64() { insta::assert_snapshot!(QueryFixture::new("{ count: count(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,count - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,3 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,3 "###); } @@ -292,12 +292,12 @@ async fn test_count_i64() { async fn test_count_f64() { insta::assert_snapshot!(QueryFixture::new("{ count: count(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,count - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1 - 
1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,3 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,3 "###); } @@ -305,12 +305,12 @@ async fn test_count_f64() { async fn test_count_record() { insta::assert_snapshot!(QueryFixture::new("{ count: count(Numbers)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,count - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,3 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,4 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,5 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,3 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,4 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,5 "###); } @@ -318,12 +318,12 @@ async fn test_count_record() { async fn test_count_string() { insta::assert_snapshot!(QueryFixture::new("{ count: count(Strings.s)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,count - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,2 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,3 - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,5 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,2 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,3 + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,5 "###); } @@ -331,13 +331,13 @@ async fn test_count_string() { async fn test_count_boolean() { insta::assert_snapshot!(QueryFixture::new("{ count: count(Booleans.a)}").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,count - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1 - 
1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,2 - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,3 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,4 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,2 + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,3 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,4 "###); } @@ -345,12 +345,12 @@ async fn test_count_boolean() { async fn test_count_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ count: count(Times.n)}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,count - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,2 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,2 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,3 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,4 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,2 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,2 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,3 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,4 "###); } @@ -358,12 +358,12 @@ async fn test_count_timestamp_ns() { async fn test_count_if_condition() { insta::assert_snapshot!(QueryFixture::new("{ count_if: count_if(Numbers.m > 10)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,count_if - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,1 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,1 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,2 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,2 "###); } @@ -371,12 +371,12 @@ async fn test_count_if_condition() { async fn test_first_i64() { insta::assert_snapshot!(QueryFixture::new("{ first: 
first(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,first - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,5 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,5 "###); } @@ -384,12 +384,12 @@ async fn test_first_i64() { async fn test_first_f64() { insta::assert_snapshot!(QueryFixture::new("{ first: first(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,first - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,5.2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,5.2 "###); } @@ -397,12 +397,12 @@ async fn test_first_f64() { async fn test_first_record() { insta::assert_snapshot!(QueryFixture::new("{ first: Numbers | first() | $input.m }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,first - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,5 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,5 "###); } @@ -410,12 +410,12 @@ async fn test_first_record() { async fn test_first_string() { 
insta::assert_snapshot!(QueryFixture::new("{ first: first(Strings.s)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,first - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,World - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,World - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,World - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,World + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,World + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,World + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,World + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,World "###); } @@ -423,13 +423,13 @@ async fn test_first_string() { async fn test_first_boolean() { insta::assert_snapshot!(QueryFixture::new("{ first: first(Booleans.a) }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,first - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,false - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,false - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,false "###); } @@ -437,12 +437,12 @@ async fn test_first_boolean() { async fn test_first_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ first: first(Times.n)}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,first - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,4 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,4 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,4 + 
1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,4 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,4 "###); } @@ -450,12 +450,12 @@ async fn test_first_timestamp_ns() { async fn test_last_sliding_i64() { insta::assert_snapshot!(QueryFixture::new("{ last: last(Numbers.m, window=sliding(3, monthly())) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,12 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,12 "###); } @@ -463,12 +463,12 @@ async fn test_last_sliding_i64() { async fn test_first_since_i64() { insta::assert_snapshot!(QueryFixture::new("{ first: first(Numbers.m, window=since(daily())) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,first - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,5 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,5 "###); } @@ -476,12 +476,12 @@ async fn test_first_since_i64() { async fn test_last_i64() { insta::assert_snapshot!(QueryFixture::new("{ last: last(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,12 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 
1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,12 "###); } @@ -492,8 +492,8 @@ async fn test_last_i64_finished() { // be updated, because there is no new `Numbers.m`. insta::assert_snapshot!(QueryFixture::new("{ last: last(Numbers.m) | when(finished()) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1996-12-20T00:40:02.000000001,18446744073709551615,3650215962958587783,A,12 - 1996-12-20T00:40:02.000000001,18446744073709551615,11753611437813598533,B,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,2867199309159137213,B,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,12960666915911099378,A,12 "###); } @@ -504,8 +504,8 @@ async fn test_last_i64_record_finished() { // be updated, because there is no new `Numbers.m`. insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m } | last() | when(finished())").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m - 1996-12-20T00:40:02.000000001,18446744073709551615,3650215962958587783,A, - 1996-12-20T00:40:02.000000001,18446744073709551615,11753611437813598533,B,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,2867199309159137213,B,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,12960666915911099378,A, "###); } @@ -513,12 +513,12 @@ async fn test_last_i64_record_finished() { async fn test_last_f64() { insta::assert_snapshot!(QueryFixture::new("{ last: last(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,17.6 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,12.4 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,17.6 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,12.4 "###); } @@ -526,12 +526,12 @@ async fn test_last_f64() { async fn test_last_record() { insta::assert_snapshot!(QueryFixture::new("{ last: Numbers | last() | $input.m }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 
1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -539,12 +539,12 @@ async fn test_last_record() { async fn test_last_string() { insta::assert_snapshot!(QueryFixture::new("{ last: last(Strings.s)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye "###); } @@ -552,8 +552,8 @@ async fn test_last_string() { async fn test_last_string_finished() { insta::assert_snapshot!(QueryFixture::new("{ last: last(Strings.s)}").with_final_results().run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1996-12-20T00:44:57.000000001,18446744073709551615,3650215962958587783,A,hEllo - 1996-12-20T00:44:57.000000001,18446744073709551615,11753611437813598533,B,goodbye + 1996-12-20T00:44:57.000000001,18446744073709551615,2867199309159137213,B,goodbye + 1996-12-20T00:44:57.000000001,18446744073709551615,12960666915911099378,A,hEllo "###); } @@ -561,13 +561,13 @@ async fn test_last_string_finished() { async fn test_last_boolean() { insta::assert_snapshot!(QueryFixture::new("{ last: last(Booleans.a)}").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,false - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false + 
1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,false "###); } @@ -575,12 +575,12 @@ async fn test_last_boolean() { async fn test_last_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n)}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23 "###); } @@ -588,12 +588,12 @@ async fn test_last_timestamp_ns() { async fn test_count_constant() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: count(0) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,0 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,0 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,0 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,0 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,0 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,0 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,0 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,0 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,0 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,0 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,0 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,0 "###); } @@ -601,12 +601,12 @@ async fn test_count_constant() { async fn test_count_if_constant() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: count_if(false) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,0 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,0 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,0 - 
1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,0 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,0 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,0 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,0 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,0 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,0 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,0 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,0 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,0 "###); } @@ -614,12 +614,12 @@ async fn test_count_if_constant() { async fn test_first_constant() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: first(2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -627,12 +627,12 @@ async fn test_first_constant() { async fn test_lag_constant() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: lag(2, 2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 
1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -640,12 +640,12 @@ async fn test_lag_constant() { async fn test_last_constant() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: last(2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -653,12 +653,12 @@ async fn test_last_constant() { async fn test_max_constant() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: max(2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 
2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -666,12 +666,12 @@ async fn test_max_constant() { async fn test_mean_constant() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: mean(2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -679,12 +679,12 @@ async fn test_mean_constant() { async fn test_min_constant() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: min(2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -692,12 +692,12 @@ async fn test_min_constant() { async fn test_min_stddev() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: stddev(2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 
1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -705,12 +705,12 @@ async fn test_min_stddev() { async fn test_min_sum() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: sum(2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -718,12 +718,12 @@ async fn test_min_sum() { async fn test_min_variance() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: variance(2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 
1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -731,12 +731,12 @@ async fn test_min_variance() { async fn test_first_sum_constant() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: first(sum(1)) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -744,12 +744,12 @@ async fn test_first_sum_constant() { async fn test_add_sum_constants() { insta::assert_snapshot!(QueryFixture::new("{ time: Times.time, agg: sum(1) + sum(2) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,agg - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000, + 
1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000, "###); } @@ -757,11 +757,11 @@ async fn test_add_sum_constants() { async fn test_literal_agg() { insta::assert_snapshot!(QueryFixture::new("{ agg_literal: sum(5), max_output: max(Numbers.m) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,agg_literal,max_output - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,17 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,,17 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,17 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,17 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,,17 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,17 "###); } diff --git a/crates/sparrow-main/tests/e2e/basic_error_tests.rs b/crates/sparrow-main/tests/e2e/basic_error_tests.rs index eb582281b..877faabdc 100644 --- a/crates/sparrow-main/tests/e2e/basic_error_tests.rs +++ b/crates/sparrow-main/tests/e2e/basic_error_tests.rs @@ -474,7 +474,7 @@ async fn test_parse_error_missing_parentheses() { - "1 | { n: Numbers.n" - " | ^ Unexpected EOF" - " |" - - " = Expected \")\",\",\",\"]\",\"in\",\"let\",\"}\"" + - " = Expected \",\",\"}\"" - "" - "" "###); @@ -497,7 +497,7 @@ async fn test_parse_error_unrecognized() { - "1 | limit x = 5 in { n: Numbers.n}" - " | ^ Invalid token 'x'" - " |" - - " = Expected \"!=\", \"(\", \")\", \"*\", \"+\", \",\", \"-\", \".\", \"/\", \":\", \"<\", \"<=\", \"<>\", \"=\", \"=\", \"==\", \">\", \">=\", \"[\", \"]\", \"and\", \"as\", \"in\", \"let\", \"or\", \"|\", \"}\"" + - " = Expected \"!=\", \"(\", \"*\", \"+\", \"-\", \".\", \"/\", \"<\", \"<=\", \"<>\", \"==\", \">\", \">=\", \"[\", \"and\", \"as\", \"or\", \"|\"" - "" - "" - severity: error @@ -731,7 +731,7 @@ async fn test_invalid_named_arguments_duplicates() { - "1 | { n: ceil(x = Numbers.n, x = 5) } " - " | ^^^^" - " |" - - " = Nearest matches: n" + - " = Nearest matches: 'n'" - "" - "" "###); diff --git a/crates/sparrow-main/tests/e2e/cast_tests.rs b/crates/sparrow-main/tests/e2e/cast_tests.rs index 6293b482a..38ad218f3 100644 --- a/crates/sparrow-main/tests/e2e/cast_tests.rs +++ b/crates/sparrow-main/tests/e2e/cast_tests.rs @@ -39,13 +39,13 @@ async fn cast_data_fixture() -> DataFixture { async fn test_implicit_cast_i64_to_f64_add() { insta::assert_snapshot!(QueryFixture::new("{ i64_field: Input.i64, f64_field: Input.f64, add: Input.i64 + Input.f64 }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,i64_field,f64_field,add - 
1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,50,21.4,71.4 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,,1.22, - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,25,0.0,25.0 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,35,0.0,35.0 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,25,2.2,27.2 - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,12,, - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,50,21.4,71.4 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,,1.22, + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,25,0.0,25.0 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,35,0.0,35.0 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,25,2.2,27.2 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,12,, + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,,, "###); } @@ -53,13 +53,13 @@ async fn test_implicit_cast_i64_to_f64_add() { async fn test_implicit_cast_i64_to_f64_powf() { insta::assert_snapshot!(QueryFixture::new("{ i64_field: Input.i64, f64_field: Input.f64, powf: powf(Input.i64, Input.f64) }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,i64_field,f64_field,powf - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,50,21.4,2.280122041201667e36 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,,1.22, - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,25,0.0,1.0 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,35,0.0,1.0 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,25,2.2,1189.7837116974247 - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,12,, - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,50,21.4,2.280122041201667e36 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,,1.22, + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,25,0.0,1.0 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,35,0.0,1.0 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,25,2.2,1189.7837116974247 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,12,, + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,,, "###); } @@ -67,13 +67,13 @@ async fn test_implicit_cast_i64_to_f64_powf() { async fn test_implicit_cast_i64_to_f64_literal() { insta::assert_snapshot!(QueryFixture::new("{ i64_field: Input.i64, add: Input.i64 + 1.11 }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,i64_field,add - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,50,51.11 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,, - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,25,26.11 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,35,36.11 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,25,26.11 - 
1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,12,13.11 - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,50,51.11 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,, + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,25,26.11 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,35,36.11 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,25,26.11 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,12,13.11 + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,, "###); } @@ -81,13 +81,13 @@ async fn test_implicit_cast_i64_to_f64_literal() { async fn test_string_as_i64() { insta::assert_snapshot!(QueryFixture::new("{ number_string: Input.number_string, number_string_as_i64: Input.number_string as i64 }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,number_string,number_string_as_i64 - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,65,65 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,hello, - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,73,73 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,73,73 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,82,82 - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,18,18 - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,65,65 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,hello, + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,73,73 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,73,73 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,82,82 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,18,18 + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,, "###); } @@ -95,13 +95,13 @@ async fn test_string_as_i64() { async fn test_i64_as_i32() { insta::assert_snapshot!(QueryFixture::new("{ i64: Input.i64, i64_as_i32: Input.i64 as i32 }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,i64,i64_as_i32 - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,50,50 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,, - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,25,25 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,35,35 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,25,25 - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,12,12 - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,50,50 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,, + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,25,25 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,35,35 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,25,25 + 
1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,12,12 + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,, "###); } @@ -109,13 +109,13 @@ async fn test_i64_as_i32() { async fn test_f64_as_i64() { insta::assert_snapshot!(QueryFixture::new("{ f64: Input.f64, f64_as_i64: Input.f64 as i64 }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,f64,f64_as_i64 - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,21.4,21 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,1.22,1 - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,0.0,0 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,0.0,0 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,2.2,2 - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,, - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,21.4,21 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,1.22,1 + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,0.0,0 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,0.0,0 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,2.2,2 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,, + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,, "###); } @@ -123,13 +123,13 @@ async fn test_f64_as_i64() { async fn test_i64_as_string() { insta::assert_snapshot!(QueryFixture::new("{ i64: Input.i64, i64_as_string: Input.i64 as string }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,i64,i64_as_string - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,50,50 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,, - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,25,25 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,35,35 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,25,25 - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,12,12 - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,50,50 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,, + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,25,25 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,35,35 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,25,25 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,12,12 + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,, "###); } @@ -139,13 +139,13 @@ async fn test_null_literal_as_string() { // are literals. 
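// (As the snapshot below shows, both `null_` and `null_as_string` are empty on every row: casting a null literal yields null regardless of the target type.)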
insta::assert_snapshot!(QueryFixture::new("{ i64: Input.i64, null_: null, null_as_string: null as string }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,i64,null_,null_as_string - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,50,, - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,,, - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,25,, - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,35,, - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,25,, - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,12,, - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,50,, + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,,, + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,25,, + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,35,, + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,25,, + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,12,, + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,,, "###); } @@ -155,13 +155,13 @@ async fn test_seconds_between_as_i64() { "let duration_s = seconds_between(Input.order_time, Input.time) in { duration_s_as_i64: duration_s as i64 }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,duration_s_as_i64 - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,-283996800 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,-126230400 - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,-126230399 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,-126230398 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,-157766400 - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,-156208802 - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,-283996800 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,-126230400 + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,-126230399 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,-126230398 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,-157766400 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,-156208802 + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0, "###); } @@ -169,13 +169,13 @@ async fn test_seconds_between_as_i64() { async fn test_days_between_as_i32() { insta::assert_snapshot!(QueryFixture::new("{ i64: Input.i64, interval_days_as_i64: days(Input.i64) as i32 }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,i64,interval_days_as_i64 - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,50,50 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,, - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,25,25 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,35,35 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,25,25 - 
1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,12,12 - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,50,50 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,, + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,25,25 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,35,35 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,25,25 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,12,12 + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,, "###); } @@ -183,13 +183,13 @@ async fn test_days_between_as_i32() { async fn test_months_between_as_i32() { insta::assert_snapshot!(QueryFixture::new("{ i64: Input.i64, interval_months_as_i64: months(Input.i64) as i32 }").run_to_csv(&cast_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,i64,interval_months_as_i64 - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,50,50 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,, - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,25,25 - 1997-12-20T00:39:59.000000000,9223372036854775810,14253486467890685049,0,35,35 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,25,25 - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,12,12 - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,50,50 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,, + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,25,25 + 1997-12-20T00:39:59.000000000,9223372036854775810,11832085162654999889,0,35,35 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,25,25 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,12,12 + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,, "###); } @@ -199,11 +199,11 @@ async fn test_bool_as_i64() { let n = Numbers.n + 11 in { m, n, eq: (m == n) as i64 }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,21,0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,14,0 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,17,1 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,20, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,21,0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,14,0 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,17,1 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,20, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } diff --git a/crates/sparrow-main/tests/e2e/coalesce_tests.rs b/crates/sparrow-main/tests/e2e/coalesce_tests.rs index 01fd51ec1..d6c93a3b3 100644 --- 
a/crates/sparrow-main/tests/e2e/coalesce_tests.rs +++ b/crates/sparrow-main/tests/e2e/coalesce_tests.rs @@ -8,13 +8,13 @@ use crate::QueryFixture; async fn test_coalesce_two_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, b: Booleans.b, coalesce_a_b: Booleans.a | coalesce($input, Booleans.b) }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,b,coalesce_a_b - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,false,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,true,true - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,false,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,true,false - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,,false - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,true,true + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true,false + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,,false + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,,, "###); } @@ -45,12 +45,12 @@ async fn test_coalesce_zero() { async fn test_coalesce_one_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, coalesce_m: coalesce(Numbers.m) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,coalesce_m - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -58,12 +58,12 @@ async fn test_coalesce_one_i64() { async fn test_coalesce_one_i64_one_literal_i64() { insta::assert_snapshot!(QueryFixture::new("{ n: Times.n, coalesce_n_literal: coalesce(Times.n, 12345) }").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,coalesce_n_literal - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5,5 - 
1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,12345 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23,23 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,12345 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23,23 "###); } @@ -71,12 +71,12 @@ async fn test_coalesce_one_i64_one_literal_i64() { async fn test_coalesce_one_i64_one_literal_f64() { insta::assert_snapshot!(QueryFixture::new("{ n: Times.n, coalesce_n_literal: coalesce(Times.n, 12345.7) }").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,coalesce_n_literal - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2.0 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,4.0 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5,5.0 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,12345.7 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8.0 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23,23.0 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2.0 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,4.0 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5,5.0 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,12345.7 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8.0 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23,23.0 "###); } @@ -84,12 +84,12 @@ async fn test_coalesce_one_i64_one_literal_f64() { async fn test_coalesce_two_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, coalesce_m_n: coalesce(Numbers.m, Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,coalesce_m_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,9 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,9 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -97,12 +97,12 @@ async fn test_coalesce_two_i64() { async fn test_coalesce_two_i64_one_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, 
coalesce_m_n: coalesce(Numbers.m, Numbers.n, 42) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,coalesce_m_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,9 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,42 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,9 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,42 "###); } @@ -110,12 +110,12 @@ async fn test_coalesce_two_i64_one_literal() { async fn test_coalesce_two_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, coalesce_m_n: coalesce(Numbers.m, Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,coalesce_m_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,17.6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,12.4 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,24.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,17.6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,12.4 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -123,12 +123,12 @@ async fn test_coalesce_two_f64() { async fn test_coalesce_two_string() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, t: Strings.t, coalesce_s_t: coalesce(Strings.s, Strings.t) }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,t,coalesce_s_t - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,hEllo,hEllo - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,world,World - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,hello world,hello world - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,greetings, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,salutations, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,,goodbye + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo,hEllo + 
1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,world,World + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,hello world,hello world + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,greetings, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,salutations, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,,goodbye "###); } @@ -136,12 +136,12 @@ async fn test_coalesce_two_string() { async fn test_coalesce_two_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, coalesce_m_n: coalesce(Times.m, Times.n) }").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,coalesce_m_n - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,4 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,3 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,11 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,4 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,3 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,11 "###); } @@ -149,12 +149,12 @@ async fn test_coalesce_two_timestamp_ns() { async fn test_coalesce_two_record() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, coalesce_times_times: coalesce(Times, Times) | $input.n }").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,coalesce_times_times - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,23 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,23 "###); } diff --git a/crates/sparrow-main/tests/e2e/collect_tests.rs b/crates/sparrow-main/tests/e2e/collect_tests.rs new file mode 100644 index 000000000..b12a66e09 --- /dev/null +++ b/crates/sparrow-main/tests/e2e/collect_tests.rs @@ -0,0 +1,619 @@ +//! 
e2e tests for collect function + +use indoc::indoc; +use sparrow_api::kaskada::v1alpha::TableConfig; +use uuid::Uuid; + +use crate::{fixture::DataFixture, QueryFixture}; + +pub(crate) async fn collect_data_fixture() -> DataFixture { + DataFixture::new() + .with_table_from_csv( + TableConfig::new_with_table_source( + "Collect", + &Uuid::new_v4(), + "time", + Some("subsort"), + "key", + "", + ), + indoc! {" + time,subsort,key,s,n,b,index + 1996-12-19T16:39:57-08:00,0,A,hEllo,0,true,0 + 1996-12-19T16:40:57-08:00,0,A,hi,2,false,1 + 1996-12-19T16:41:57-08:00,0,A,hey,9,,2 + 1996-12-19T16:42:00-08:00,0,A,heylo,-7,false,2 + 1996-12-19T16:42:57-08:00,0,A,ay,-1,true,1 + 1996-12-19T16:43:57-08:00,0,A,hIlo,10,true, + 1996-12-20T16:40:57-08:00,0,B,h,5,false,0 + 1996-12-20T16:41:57-08:00,0,B,he,-2,,1 + 1996-12-20T16:42:57-08:00,0,B,,,true,2 + 1996-12-20T16:43:57-08:00,0,B,hel,2,false,1 + 1996-12-20T16:44:57-08:00,0,B,,,true,1 + 1996-12-20T17:44:57-08:00,0,B,hello,5,true,0 + 1996-12-21T16:44:57-08:00,0,C,g,1,true,2 + 1996-12-21T16:45:57-08:00,0,C,go,2,true,0 + 1996-12-21T16:46:57-08:00,0,C,goo,3,true, + 1996-12-21T16:47:57-08:00,0,C,good,4,true,1 + "}, + ) + .await + .unwrap() +} + +#[tokio::test] +async fn test_collect_with_null_max() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.n | collect(max = null) | index(0), f2: Collect.b | collect(max = null) | index(0), f3: Collect.s | collect(max = null) | index(0) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1,f2,f3 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,0,true,hEllo + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,0,true,hEllo + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,0,true,hEllo + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,0,true,hEllo + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,0,true,hEllo + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,0,true,hEllo + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,5,false,h + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5,false,h + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B,5,false,h + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,5,false,h + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,5,false,h + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,5,false,h + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,1,true,g + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,1,true,g + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,1,true,g + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,1,true,g + "###); +} + +#[tokio::test] +async fn test_collect_to_list_i64() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.n | collect(max=10) | index(0) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,0 
+ 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,1 + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,1 + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,1 + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,1 + "###); +} + +#[tokio::test] +async fn test_collect_to_list_i64_dynamic() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.n | collect(max=10) | index(Collect.index) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,9 + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,9 + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,-2 + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,-2 + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,-2 + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,1 + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,2 + "###); +} + +#[tokio::test] +async fn test_collect_to_small_list_i64() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.n | collect(max=2) | index(Collect.index) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,-1 + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,-2 + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,2 + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2 + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,2 + 
1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,1 + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,4 + "###); +} + +#[tokio::test] +async fn test_collect_to_list_string() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.s | collect(max=10) | index(0) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,h + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,h + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B,h + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,h + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,h + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,h + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,g + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,g + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,g + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,g + "###); +} + +#[tokio::test] +async fn test_collect_to_list_string_dynamic() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.s | collect(max=10) | index(Collect.index) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,hi + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,hey + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,hey + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,hi + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,h + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,he + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,he + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,he + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,h + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,g + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,go + "###); +} + +#[tokio::test] +async fn test_collect_to_small_list_string() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.s | collect(max=2) | index(Collect.index) 
}").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,hi + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,ay + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,h + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,he + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,hel + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,g + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,good + "###); +} + +#[tokio::test] +async fn test_collect_to_list_boolean() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.b | collect(max=10) | index(0) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,true + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,true + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,true + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,true + "###); +} + +#[tokio::test] +async fn test_collect_to_list_boolean_dynamic() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.b | collect(max=10) | index(Collect.index) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,false + 
1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,true + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,true + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,true + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,true + "###); +} + +#[tokio::test] +async fn test_collect_to_small_list_boolean() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.b | collect(max=2) | index(Collect.index) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,true + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,true + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,true + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C, + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,true + "###); +} + +#[tokio::test] +async fn test_collect_structs() { + insta::assert_snapshot!(QueryFixture::new("{ + s0: { s: Collect.s, n: Collect.n, b: Collect.b } | collect(max = null) | index(0) | $input.s, + s1: { s: Collect.s, n: Collect.n, b: Collect.b } | collect(max = null) | index(1) | $input.s, + s2: { s: Collect.s, n: Collect.n, b: Collect.b } | collect(max = null) | index(2) | $input.s, + s3: { s: Collect.s, n: Collect.n, b: Collect.b } | collect(max = null) | index(3) | $input.s, + s4: { s: Collect.s, n: Collect.n, b: Collect.b } | collect(max = null) | index(4) | $input.s + } + ").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,s0,s1,s2,s3,s4 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,,,, + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi,,, + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi,hey,, + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi,hey,heylo, + 
1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi,hey,heylo,ay + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi,hey,heylo,ay + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,h,,,, + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,h,he,,, + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B,h,he,,, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,h,he,,hel, + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,h,he,,hel, + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,h,he,,hel, + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,g,,,, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,g,go,,, + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,g,go,goo,, + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,g,go,goo,good, + "###); +} + +#[tokio::test] +async fn test_collect_with_minimum() { + insta::assert_snapshot!(QueryFixture::new("{ + min0: Collect.s | collect(max=10) | index(0), + min1: Collect.s | collect(min=2, max=10) | index(0), + min2: Collect.s | collect(min=3, max=10) | index(0) + }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,min0,min1,min2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,, + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo, + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo,hEllo + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo,hEllo + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo,hEllo + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo,hEllo + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,h,, + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,h,h, + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B,h,h,h + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,h,h,h + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,h,h,h + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,h,h,h + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,g,, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,g,g, + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,g,g,g + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,g,g,g + "###); +} + +#[tokio::test] +async fn test_collect_structs_map() { + insta::assert_snapshot!(QueryFixture::new(" + let x = Collect | collect(max=10) | $input.s + in { x: x | index(0), y: x | index(1) } + ").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,x,y + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo, + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi + 
1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hi + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,h, + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,h,he + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B,h,he + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,h,he + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,h,he + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,h,he + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,g, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,g,go + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,g,go + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,g,go + "###); +} + +#[tokio::test] +#[ignore = "unsupported via fenl"] +async fn test_collect_lists() { + insta::assert_snapshot!(QueryFixture::new(" + let x = Collect.s | collect(max=10) | collect(max=10) + in { x } + ").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,x,y + "###); +} + +#[tokio::test] +async fn test_collect_lag_equality() { + // Lag is implemented with collect now, so these _better_ be the same. + insta::assert_snapshot!(QueryFixture::new("{ + collect: Collect.n | collect(min=3, max=3) | index(0), + lag: Collect.n | lag(2) + }").with_dump_dot("asdf").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,collect,lag + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,0,0 + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,2,2 + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,9,9 + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,-7,-7 + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,5,5 + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,5,5 + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,-2,-2 + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,, + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,1,1 + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,2,2 + "###); +} + +#[tokio::test] +async fn test_collect_primitive_since_minutely() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.n | collect(max=10, window=since(minutely())) | index(0) | when(is_valid($input))}").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-20T00:40:00.000000000,18446744073709551615,12960666915911099378,A,0 + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:41:00.000000000,18446744073709551615,12960666915911099378,A,2 + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,9 + 
1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,9 + 1996-12-20T00:42:00.000000000,18446744073709551615,12960666915911099378,A,9 + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,-1 + 1996-12-20T00:43:00.000000000,18446744073709551615,12960666915911099378,A,-1 + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,10 + 1996-12-20T00:44:00.000000000,18446744073709551615,12960666915911099378,A,10 + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-21T00:41:00.000000000,18446744073709551615,2867199309159137213,B,5 + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,-2 + 1996-12-21T00:42:00.000000000,18446744073709551615,2867199309159137213,B,-2 + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,2 + 1996-12-21T00:44:00.000000000,18446744073709551615,2867199309159137213,B,2 + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-21T01:45:00.000000000,18446744073709551615,2867199309159137213,B,5 + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,1 + 1996-12-22T00:45:00.000000000,18446744073709551615,2521269998124177631,C,1 + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,2 + 1996-12-22T00:46:00.000000000,18446744073709551615,2521269998124177631,C,2 + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,3 + 1996-12-22T00:47:00.000000000,18446744073709551615,2521269998124177631,C,3 + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,4 + "###); +} + +#[tokio::test] +async fn test_collect_primitive_since_minutely_1() { + // Only two rows in this set exist within the same minute, hence these results when + // getting the second item.
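+ // For example (reading the fixture and the snapshot below): for key `A`, only the
+ // events at 00:41:57 (n=9) and 00:42:00 (n=-7) fall in the same minutely window,
+ // so the collected list reaches [9, -7] and `index(1)` is non-null only at 00:42:00.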
+ insta::assert_snapshot!(QueryFixture::new("{ + f1: Collect.n | collect(max=10, window=since(minutely())) | index(1) | when(is_valid($input)), + f1_with_min: Collect.n | collect(min=3, max=10, window=since(minutely())) | index(1) | when(is_valid($input)) + }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1,f1_with_min + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,-7, + 1996-12-20T00:42:00.000000000,18446744073709551615,12960666915911099378,A,-7, + "###); +} + +#[tokio::test] +async fn test_collect_string_since_hourly() { + // Note that `B` is empty because we currently collect `null` as a valid list value. + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.s | collect(max=10, window=since(hourly())) | index(2) | when(is_valid($input)) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,hey + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,hey + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,hey + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,hey + 1996-12-20T01:00:00.000000000,18446744073709551615,12960666915911099378,A,hey + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-21T01:00:00.000000000,18446744073709551615,2867199309159137213,B, + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,goo + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,goo + "###); +} + +#[tokio::test] +async fn test_collect_boolean_since_hourly() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Collect.b | collect(max=10, window=since(hourly())) | index(3) | when(is_valid($input)) }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T01:00:00.000000000,18446744073709551615,12960666915911099378,A,true + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,true + 1996-12-21T01:00:00.000000000,18446744073709551615,2867199309159137213,B,true + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,true + "###); +} + +#[tokio::test] +async fn test_collect_struct_since_hourly() { + // TODO: The results here are weird, because `collect` is latched. I don't think I'd expect + // the results we have here, but it's possible they're technically in line with what we expect + // given our continuity rules. We should revisit this.
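+ // For example, key `B` at 01:44:57 below still shows f2..f4 from the previous hour,
+ // even though the hourly window has already reset by then.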
+ // https://github.com/kaskada-ai/kaskada/issues/648 + insta::assert_snapshot!(QueryFixture::new("{ + b: Collect.b, + f0: ({b: Collect.b} | collect(max=10, window=since(hourly())) | index(0)).b | when(is_valid($input)), + f1: ({b: Collect.b} | collect(max=10, window=since(hourly())) | index(1)).b | when(is_valid($input)), + f2: ({b: Collect.b} | collect(max=10, window=since(hourly())) | index(2)).b | when(is_valid($input)), + f3: ({b: Collect.b} | collect(max=10, window=since(hourly())) | index(3)).b | when(is_valid($input)), + f4: ({b: Collect.b} | collect(max=10, window=since(hourly())) | index(4)).b | when(is_valid($input)) + }").run_to_csv(&collect_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,b,f0,f1,f2,f3,f4 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true,,,, + 1996-12-20T00:40:57.000000000,9223372036854775808,12960666915911099378,A,false,true,false,,, + 1996-12-20T00:41:57.000000000,9223372036854775808,12960666915911099378,A,,true,false,,, + 1996-12-20T00:42:00.000000000,9223372036854775808,12960666915911099378,A,false,true,false,,false, + 1996-12-20T00:42:57.000000000,9223372036854775808,12960666915911099378,A,true,true,false,,false,true + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,true,true,false,,false,true + 1996-12-20T01:00:00.000000000,18446744073709551615,12960666915911099378,A,,true,false,,false,true + 1996-12-21T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false,,,, + 1996-12-21T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,false,,,, + 1996-12-21T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false,,true,, + 1996-12-21T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,false,,true,false, + 1996-12-21T00:44:57.000000000,9223372036854775808,2867199309159137213,B,true,false,,true,false,true + 1996-12-21T01:00:00.000000000,18446744073709551615,2867199309159137213,B,,false,,true,false,true + 1996-12-21T01:44:57.000000000,9223372036854775808,2867199309159137213,B,true,true,,true,false,true + 1996-12-21T02:00:00.000000000,18446744073709551615,2867199309159137213,B,,true,,true,false,true + 1996-12-22T00:44:57.000000000,9223372036854775808,2521269998124177631,C,true,true,,,, + 1996-12-22T00:45:57.000000000,9223372036854775808,2521269998124177631,C,true,true,true,,, + 1996-12-22T00:46:57.000000000,9223372036854775808,2521269998124177631,C,true,true,true,true,, + 1996-12-22T00:47:57.000000000,9223372036854775808,2521269998124177631,C,true,true,true,true,true, + "###); +} + +#[tokio::test] +async fn test_require_literal_max() { + // TODO: We should figure out how to avoid reporting the second error -- type variables with + // error propagation need some fixing.
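+ // The second diagnostic (E0010) appears to cascade from the first: the rejected `collect` leaves `$input` with type `error`, which then fails the `index` call in the snapshot below.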
+ insta::assert_yaml_snapshot!(QueryFixture::new("{ f1: Collect.s | collect(max=Collect.index) | index(1) }") + .run_to_csv(&collect_data_fixture().await).await.unwrap_err(), @r###" + --- + code: Client specified an invalid argument + message: 2 errors in Fenl statements; see diagnostics + fenl_diagnostics: + - severity: error + code: E0014 + message: Invalid non-constant argument + formatted: + - "error[E0014]: Invalid non-constant argument" + - " --> Query:1:31" + - " |" + - "1 | { f1: Collect.s | collect(max=Collect.index) | index(1) }" + - " | ^^^^^^^^^^^^^ Argument 'max' to 'collect' must be constant, but was not" + - "" + - "" + - severity: error + code: E0010 + message: Invalid argument type(s) + formatted: + - "error[E0010]: Invalid argument type(s)" + - " --> Query:1:48" + - " |" + - "1 | { f1: Collect.s | collect(max=Collect.index) | index(1) }" + - " | ^^^^^ Invalid types for parameter 'list' in call to 'index'" + - " |" + - " --> internal:1:1" + - " |" + - 1 | $input + - " | ------ Actual type: error" + - " |" + - " --> built-in signature 'index(i: i64, list: list) -> T':1:29" + - " |" + - "1 | index(i: i64, list: list) -> T" + - " | ------- Expected type: list" + - "" + - "" + "###); +} + +#[tokio::test] +async fn test_require_literal_min() { + insta::assert_yaml_snapshot!(QueryFixture::new("{ f1: Collect.s | collect(min=Collect.index, max=10) | index(1) }") + .run_to_csv(&collect_data_fixture().await).await.unwrap_err(), @r###" + --- + code: Client specified an invalid argument + message: 2 errors in Fenl statements; see diagnostics + fenl_diagnostics: + - severity: error + code: E0014 + message: Invalid non-constant argument + formatted: + - "error[E0014]: Invalid non-constant argument" + - " --> Query:1:31" + - " |" + - "1 | { f1: Collect.s | collect(min=Collect.index, max=10) | index(1) }" + - " | ^^^^^^^^^^^^^ Argument 'min' to 'collect' must be constant, but was not" + - "" + - "" + - severity: error + code: E0010 + message: Invalid argument type(s) + formatted: + - "error[E0010]: Invalid argument type(s)" + - " --> Query:1:56" + - " |" + - "1 | { f1: Collect.s | collect(min=Collect.index, max=10) | index(1) }" + - " | ^^^^^ Invalid types for parameter 'list' in call to 'index'" + - " |" + - " --> internal:1:1" + - " |" + - 1 | $input + - " | ------ Actual type: error" + - " |" + - " --> built-in signature 'index(i: i64, list: list) -> T':1:29" + - " |" + - "1 | index(i: i64, list: list) -> T" + - " | ------- Expected type: list" + - "" + - "" + "###); +} + +#[tokio::test] +async fn test_min_must_be_lte_max() { + insta::assert_yaml_snapshot!(QueryFixture::new("{ f1: Collect.s | collect(min=10, max=0) | index(1) }") + .run_to_csv(&collect_data_fixture().await).await.unwrap_err(), @r###" + --- + code: Client specified an invalid argument + message: 1 errors in Fenl statements; see diagnostics + fenl_diagnostics: + - severity: error + code: E0002 + message: Illegal cast + formatted: + - "error[E0002]: Illegal cast" + - " --> Query:1:31" + - " |" + - "1 | { f1: Collect.s | collect(min=10, max=0) | index(1) }" + - " | ^^ min '10' must be less than or equal to max '0'" + - "" + - "" + "###); +} diff --git a/crates/sparrow-main/tests/e2e/comparison_tests.rs b/crates/sparrow-main/tests/e2e/comparison_tests.rs index 4f39f074f..5255214aa 100644 --- a/crates/sparrow-main/tests/e2e/comparison_tests.rs +++ b/crates/sparrow-main/tests/e2e/comparison_tests.rs @@ -8,12 +8,12 @@ use crate::QueryFixture; async fn test_lt_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, 
n: Numbers.n, lt: Numbers.m < Numbers.n }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -21,12 +21,12 @@ async fn test_lt_i64() { async fn test_lt_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, lt: Numbers.m < Numbers.n}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -34,12 +34,12 @@ async fn test_lt_f64() { async fn test_lt_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, lt: (Times.m as timestamp_ns) < (Times.n as timestamp_ns) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,lt - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,false - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,true - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,false - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,true + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,false + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,true + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5, +
1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,false + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,true "###); } @@ -47,12 +47,12 @@ async fn test_lt_timestamp_ns() { async fn test_lt_i64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lt: Numbers.m < 10}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,false - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -60,12 +60,12 @@ async fn test_lt_i64_literal() { async fn test_lt_f64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lt: Numbers.m < 10.0}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,false - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -76,12 +76,12 @@ async fn test_lt_f64_literal() { let literal = \"1970-01-01T00:00:00.000000010Z\" in { m_time, literal_time: literal as timestamp_ns, lt: m_time < literal}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m_time,literal_time,lt - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000010,true - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000010,true - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000010, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000010,
- 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000010,true - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000010,false + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000010,true + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000010,true + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000010, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000010, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000010,true + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000010,false "###); } @@ -89,12 +89,12 @@ async fn test_lt_timestamp_ns_literal() { async fn test_gt_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, gt: Numbers.m > Numbers.n }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,gt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -102,12 +102,12 @@ async fn test_gt_i64() { async fn test_gt_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, gt: Numbers.m > Numbers.n}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,gt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,, + 
1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -115,12 +115,12 @@ async fn test_gt_f64() { async fn test_gt_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, gt: (Times.m as timestamp_ns) > (Times.n as timestamp_ns) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,gt - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,true - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,false - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,false - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,false + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,true + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,false + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,false + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,false "###); } @@ -128,12 +128,12 @@ async fn test_gt_timestamp_ns() { async fn test_gt_i64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lt: Numbers.m > 10}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -141,12 +141,12 @@ async fn test_gt_i64_literal() { async fn test_gt_f64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lt: Numbers.m > 10.0}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,false +
1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -157,12 +157,12 @@ async fn test_gt_f64_literal() { let literal = \"1970-01-01T00:00:00.000000010Z\" in { m_time, literal_time: literal as timestamp_ns, gt: m_time > literal}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m_time,literal_time,gt - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000010,false - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000010,false - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000010, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000010, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000010,false - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000010,true + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000010,false + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000010,false + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000010, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000010, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000010,false + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000010,true "###); } @@ -170,12 +170,12 @@ async fn test_gt_timestamp_ns_literal() { async fn test_lte_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, lte: Numbers.m <= Numbers.n }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,lte - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,
"###); } @@ -183,12 +183,12 @@ async fn test_lte_i64() { async fn test_lte_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, lte: Numbers.m <= Numbers.n}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,lte - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -196,12 +196,12 @@ async fn test_lte_f64() { async fn test_lte_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, lte: (Times.m as timestamp_ns) <= (Times.n as timestamp_ns) }").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,lte - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,false - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,true - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,true - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,true + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,false + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,true + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,true + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,true "###); } @@ -209,12 +209,12 @@ async fn test_lte_timestamp_ns() { async fn test_lte_i64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lt: Numbers.m <= 10}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,false - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,false + 
1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -222,12 +222,12 @@ async fn test_lte_i64_literal() { async fn test_lte_f64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lt: Numbers.m <= 10.0}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,false - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -238,12 +238,12 @@ async fn test_lte_f64_literal() { let literal = \"1970-01-01T00:00:00.000000008Z\" in { m_time, literal_time: literal as timestamp_ns, lte: m_time <= literal}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m_time,literal_time,lte - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000008,true - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000008,true - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000008, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000008, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000008,true - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000008,false + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000008,true + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000008,true + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000008, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000008, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000008,true + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000008,false "###); } @@ -251,12 +251,12 @@ async fn test_lte_timestamp_ns_literal() { async fn test_gte_i64() {
insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, gte: Numbers.m >= Numbers.n }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,gte - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -264,12 +264,12 @@ async fn test_gte_i64() { async fn test_gte_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, gte: Numbers.m >= Numbers.n}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,gte - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -277,12 +277,12 @@ async fn test_gte_f64() { async fn test_gte_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, gte: Times.m >= Times.n}").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,gte - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,true - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,false - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,true - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,false + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,true + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,false + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5, + 
1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,true + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,false "###); } @@ -290,12 +290,12 @@ async fn test_gte_timestamp_ns() { async fn test_gte_i64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lt: Numbers.m >= 10}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -303,12 +303,12 @@ async fn test_gte_i64_literal() { async fn test_gte_f64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lt: Numbers.m >= 10.0}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -319,11 +319,11 @@ async fn test_gte_f64_literal() { let literal = \"1970-01-01T00:00:00.000000008Z\" in { m_time, literal_time: literal as timestamp_ns, gte: m_time >= literal}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m_time,literal_time,gte - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000008,false - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000008,false - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000008, -
1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000008, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000008,true - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000008,true + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000008,false + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000008,false + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000008, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000008, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000008,true + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000008,true "###); } diff --git a/crates/sparrow-main/tests/e2e/decoration_tests.rs b/crates/sparrow-main/tests/e2e/decoration_tests.rs index 896a55742..d2d0040ef 100644 --- a/crates/sparrow-main/tests/e2e/decoration_tests.rs +++ b/crates/sparrow-main/tests/e2e/decoration_tests.rs @@ -14,12 +14,12 @@ use crate::QueryFixture; async fn test_last_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n)}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23 "###); } @@ -27,8 +27,8 @@ async fn test_last_timestamp_ns() { async fn test_last_timestamp_ns_finished() { insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n) }").with_final_results().run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 2004-12-06T00:44:57.000000001,18446744073709551615,3650215962958587783,A,2 - 2004-12-06T00:44:57.000000001,18446744073709551615,11753611437813598533,B,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,2867199309159137213,B,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,12960666915911099378,A,2 "###); } @@ -46,11 +46,11 @@ async fn test_last_timestamp_ns_changed_since() { let changed_since = NaiveDateTime::new(date_for_test(1995, 1, 1), time_for_test(0, 0, 0)); insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n) }").with_changed_since(changed_since).run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###"
_time,_subsort,_key_hash,_key,last - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23 "###); } @@ -62,7 +62,7 @@ async fn test_last_timestamp_ns_changed_since_finished() { let changed_since = NaiveDateTime::new(date_for_test(1995, 1, 1), time_for_test(0, 0, 0)); insta::assert_snapshot!(QueryFixture::new("{ key: Times.key, last: last(Times.n), last_time: last(Times.time) }").with_changed_since(changed_since).with_final_results().run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,key,last,last_time - 2004-12-06T00:44:57.000000001,18446744073709551615,11753611437813598533,B,B,23,2004-12-06T00:44:57.000000000 + 2004-12-06T00:44:57.000000001,18446744073709551615,2867199309159137213,B,B,23,2004-12-06T00:44:57.000000000 "###); } @@ -74,9 +74,9 @@ async fn test_last_timestamp_ns_changed_since_equal_to_event_time() { let changed_since = NaiveDateTime::new(date_for_test(1997, 12, 12), time_for_test(0, 42, 57)); insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n) }").with_changed_since(changed_since).run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23 "###); } @@ -88,13 +88,13 @@ async fn test_last_timestamp_ns_windowed_changed_since() { let changed_since = NaiveDateTime::new(date_for_test(2001, 12, 12), time_for_test(0, 42, 57)); insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n, window=since(yearly())) }").with_changed_since(changed_since).run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 2002-01-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A, - 2002-01-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B, - 2003-01-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A, - 2003-01-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B, - 2004-01-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A, - 2004-01-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23 + 2002-01-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B, + 2002-01-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A, + 2003-01-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B, +
2003-01-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A, + 2004-01-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B, + 2004-01-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23 "###); } @@ -106,8 +106,8 @@ async fn test_last_timestamp_ns_windowed_changed_since_finished() { let changed_since = NaiveDateTime::new(date_for_test(2001, 12, 12), time_for_test(0, 42, 57)); insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n, window=since(yearly())) }").with_changed_since(changed_since).with_final_results().run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 2004-12-06T00:44:57.000000001,18446744073709551615,3650215962958587783,A, - 2004-12-06T00:44:57.000000001,18446744073709551615,11753611437813598533,B,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,2867199309159137213,B,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,12960666915911099378,A, "###); } @@ -133,7 +133,7 @@ async fn test_last_timestamp_ns_changed_since_final_expect_filtered_results() { .with_changed_since(changed_since) .with_final_results().run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,key,time,last - 2004-12-06T00:44:57.000000001,18446744073709551615,11753611437813598533,B,B,2004-12-06T00:44:57.000000000,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,2867199309159137213,B,B,2004-12-06T00:44:57.000000000,23 "###); } @@ -145,7 +145,7 @@ async fn test_last_timestamp_ns_changed_since_expect_filtered_results() { insta::assert_snapshot!(QueryFixture::new(FILTERED_RESULTS) .with_changed_since(changed_since).run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,key,time,last - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,B,2004-12-06T00:44:57.000000000,23 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,B,2004-12-06T00:44:57.000000000,23 "###); } @@ -158,7 +158,7 @@ async fn test_last_timestamp_ns_final_expect_filtered_results() { insta::assert_snapshot!(QueryFixture::new(FILTERED_RESULTS) .with_final_results().run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,key,time,last - 2004-12-06T00:44:57.000000001,18446744073709551615,11753611437813598533,B,B,2004-12-06T00:44:57.000000000,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,2867199309159137213,B,B,2004-12-06T00:44:57.000000000,23 "###); } @@ -168,11 +168,11 @@ async fn test_last_timestamp_filtered_results() { insta::assert_snapshot!(QueryFixture::new(FILTERED_RESULTS).run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,key,time,last - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,B,1995-10-20T00:40:57.000000000,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,B,1996-08-20T00:41:57.000000000,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,B,1997-12-12T00:42:57.000000000,5 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,B,1998-12-13T00:43:57.000000000,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,B,2004-12-06T00:44:57.000000000,23 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,B,1995-10-20T00:40:57.000000000,4 +
1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,B,1996-08-20T00:41:57.000000000,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,B,1997-12-12T00:42:57.000000000,5 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,B,1998-12-13T00:43:57.000000000,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,B,2004-12-06T00:44:57.000000000,23 "###); } @@ -183,14 +183,14 @@ async fn test_final_equivalent_to_changed_since_zero() { let changed_since = NaiveDateTime::from_timestamp_opt(0, 0).unwrap(); insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n, window=since(yearly())) }").with_final_results().with_changed_since(changed_since).run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 2004-12-06T00:44:57.000000001,18446744073709551615,3650215962958587783,A, - 2004-12-06T00:44:57.000000001,18446744073709551615,11753611437813598533,B,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,2867199309159137213,B,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,12960666915911099378,A, "###); insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n, window=since(yearly())) }").with_final_results().run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 2004-12-06T00:44:57.000000001,18446744073709551615,3650215962958587783,A, - 2004-12-06T00:44:57.000000001,18446744073709551615,11753611437813598533,B,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,2867199309159137213,B,23 + 2004-12-06T00:44:57.000000001,18446744073709551615,12960666915911099378,A, "###); } @@ -199,8 +199,8 @@ async fn test_sum_i64_final_at_time() { let datetime = NaiveDateTime::new(date_for_test(1996, 12, 20), time_for_test(0, 39, 58)); insta::assert_snapshot!(QueryFixture::new("{ sum_field: sum(Numbers.m) }").with_final_results_at_time(datetime).run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_field - 1996-12-20T00:39:58.000000001,18446744073709551615,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000001,18446744073709551615,11753611437813598533,B,24 + 1996-12-20T00:39:58.000000001,18446744073709551615,2867199309159137213,B,24 + 1996-12-20T00:39:58.000000001,18446744073709551615,12960666915911099378,A,5 "###); } @@ -258,7 +258,7 @@ async fn test_last_timestamp_ns_changed_since_with_final_at_time() { let final_time = NaiveDateTime::new(date_for_test(2000, 1, 1), time_for_test(0, 0, 0)); insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n) }").with_changed_since(changed_since).with_final_results_at_time(final_time).run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 2000-01-01T00:00:00.000000001,18446744073709551615,11753611437813598533,B,8 + 2000-01-01T00:00:00.000000001,18446744073709551615,2867199309159137213,B,8 "###); } @@ -268,7 +268,7 @@ async fn test_final_at_time_past_input_times() { let final_time = NaiveDateTime::new(date_for_test(2020, 1, 1), time_for_test(0, 0, 0)); insta::assert_snapshot!(QueryFixture::new("{ last: last(Times.n) }").with_final_results_at_time(final_time).run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last - 2020-01-01T00:00:00.000000001,18446744073709551615,3650215962958587783,A,2 - 2020-01-01T00:00:00.000000001,18446744073709551615,11753611437813598533,B,23 + 2020-01-01T00:00:00.000000001,18446744073709551615,2867199309159137213,B,23 +
2020-01-01T00:00:00.000000001,18446744073709551615,12960666915911099378,A,2 "###); } diff --git a/crates/sparrow-main/tests/e2e/entity_key_output_tests.rs b/crates/sparrow-main/tests/e2e/entity_key_output_tests.rs index 6b9301777..571d24300 100644 --- a/crates/sparrow-main/tests/e2e/entity_key_output_tests.rs +++ b/crates/sparrow-main/tests/e2e/entity_key_output_tests.rs @@ -57,12 +57,12 @@ pub(crate) async fn multiple_table_fixture() -> DataFixture { async fn test_entity_keys_numbers() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -70,18 +70,18 @@ async fn test_entity_keys_numbers() { async fn test_multiple_tables_entity_keys() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers2.n }").run_to_csv(&multiple_table_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:03.000000000,9223372036854775808,9192031977313001967,C,,10.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,11430173353997062025,D,,3.9 - 1996-12-20T00:40:05.000000000,9223372036854775808,9192031977313001967,C,,6.2 - 1996-12-20T00:40:06.000000000,9223372036854775808,9192031977313001967,C,,9.25 - 1996-12-20T00:40:07.000000000,9223372036854775808,9192031977313001967,C,, - 1996-12-20T00:40:08.000000000,9223372036854775808,9192031977313001967,C,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:03.000000000,9223372036854775808,2521269998124177631,C,,10.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,1021973589662386405,D,,3.9 + 1996-12-20T00:40:05.000000000,9223372036854775808,2521269998124177631,C,,6.2 + 
1996-12-20T00:40:06.000000000,9223372036854775808,2521269998124177631,C,,9.25 + 1996-12-20T00:40:07.000000000,9223372036854775808,2521269998124177631,C,, + 1996-12-20T00:40:08.000000000,9223372036854775808,2521269998124177631,C,, "###); } @@ -90,12 +90,12 @@ async fn test_lookup_entity_keys() { insta::assert_snapshot!(QueryFixture::new("{ m: lookup(Numbers.key, sum(Numbers2.n)) }").run_to_csv(&multiple_table_fixture().await).await .unwrap(), @r###" _time,_subsort,_key_hash,_key,m - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -103,12 +103,12 @@ async fn test_lookup_entity_keys() { async fn test_with_key() { insta::assert_snapshot!(QueryFixture::new("Numbers | with_key($input.n, grouping='other_key')").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,m,n - 1996-12-20T00:39:57.000000000,9223372036854775808,688866638019693713,10,1996-12-20T00:39:57.000000000,0,A,5,10 - 1996-12-20T00:39:58.000000000,9223372036854775808,14956259290599888306,3,1996-12-20T00:39:58.000000000,0,B,24,3 - 1996-12-20T00:39:59.000000000,9223372036854775808,6303088930243614480,6,1996-12-20T00:39:59.000000000,0,A,17,6 - 1996-12-20T00:40:00.000000000,9223372036854775808,2122274938272070218,9,1996-12-20T00:40:00.000000000,0,A,,9 - 1996-12-20T00:40:01.000000000,9223372036854775808,11832085162654999889,,1996-12-20T00:40:01.000000000,0,A,12, - 1996-12-20T00:40:02.000000000,9223372036854775808,11832085162654999889,,1996-12-20T00:40:02.000000000,0,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,10780876405615667760,10,1996-12-20T00:39:57.000000000,0,A,5,10 + 1996-12-20T00:39:58.000000000,9223372036854775808,5496774745203840792,3,1996-12-20T00:39:58.000000000,0,B,24,3 + 1996-12-20T00:39:59.000000000,9223372036854775808,1360592371395427998,6,1996-12-20T00:39:59.000000000,0,A,17,6 + 1996-12-20T00:40:00.000000000,9223372036854775808,15653042715643359010,9,1996-12-20T00:40:00.000000000,0,A,,9 + 1996-12-20T00:40:01.000000000,9223372036854775808,0,,1996-12-20T00:40:01.000000000,0,A,12, + 1996-12-20T00:40:02.000000000,9223372036854775808,0,,1996-12-20T00:40:02.000000000,0,A,, "###); } @@ -117,11 +117,11 @@ async fn test_lookup_with_key_entity_keys() { insta::assert_snapshot!(QueryFixture::new("{ m: lookup(Numbers.key, with_key(Numbers.key, sum(Numbers.m))) }").run_to_csv(&multiple_table_fixture().await).await .unwrap(), @r###" _time,_subsort,_key_hash,_key,m - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,22.8 - 
1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,22.8 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,35.2 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,35.2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,22.8 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,22.8 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,35.2 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,35.2 "###); } diff --git a/crates/sparrow-main/tests/e2e/equality_tests.rs b/crates/sparrow-main/tests/e2e/equality_tests.rs index 2f6962de5..7b59c449d 100644 --- a/crates/sparrow-main/tests/e2e/equality_tests.rs +++ b/crates/sparrow-main/tests/e2e/equality_tests.rs @@ -11,12 +11,12 @@ use crate::QueryFixture; async fn test_eq_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, eq: Numbers.m == Numbers.n }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -24,12 +24,12 @@ async fn test_eq_i64() { async fn test_eq_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, eq: Numbers.m == Numbers.n }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -37,12 
+37,12 @@ async fn test_eq_f64() { async fn test_eq_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, eq: (Times.m as timestamp_ns) == (Times.n as timestamp_ns) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,eq - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,false - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,false - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,true - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,false + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,false + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,false + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,true + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,false "###); } @@ -50,13 +50,13 @@ async fn test_eq_timestamp_ns() { async fn test_eq_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, b: Booleans.b, eq: Booleans.a == Booleans.b }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,b,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,false,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,true, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,false,false - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,true,false - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,, - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,true, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false,false + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true,false + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,, + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,,, "###); } @@ -64,12 +64,12 @@ async fn test_eq_boolean() { async fn test_eq_string() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, t: Strings.t, eq: Strings.s == Strings.t }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,t,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,hEllo,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,world,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,hello world,true - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,greetings,false - 
1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,salutations,false - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,world,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,hello world,true + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,greetings,false + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,salutations,false + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,,false "###); } @@ -91,12 +91,12 @@ async fn test_eq_record() { async fn test_eq_i64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, eq: Numbers.m == 10 }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,false - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -104,12 +104,12 @@ async fn test_eq_i64_literal() { async fn test_eq_i64_literal_converse() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, eq: 10 == Numbers.m }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,false - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -117,12 +117,12 @@ async fn test_eq_i64_literal_converse() { async fn test_eq_i64_literal_null() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, eq: Numbers.m == null }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,eq - 
1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -130,12 +130,12 @@ async fn test_eq_i64_literal_null() { async fn test_eq_i64_literal_null_converse() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, eq: null == Numbers.m }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -143,12 +143,12 @@ async fn test_eq_i64_literal_null_converse() { async fn test_eq_f64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, eq: Numbers.m == 24.3 }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,false - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -156,12 +156,12 @@ async fn test_eq_f64_literal() { async fn test_eq_f64_literal_zero() { 
insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, eq: Numbers.m == 0.0 }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,false - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,false - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,false + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -172,12 +172,12 @@ async fn test_eq_timestamp_ns_literal() { let literal = \"1970-01-01T00:00:00.000000008Z\" in { m_time, literal_time: literal as timestamp_ns, lt: m_time == literal}").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m_time,literal_time,lt - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000008,false - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000008,false - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000008, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000008, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000008,true - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000008,false + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000008,false + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000008,false + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000008, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000008, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000008,true + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000008,false "###); } @@ -185,13 +185,13 @@ async fn test_eq_timestamp_ns_literal() { async fn test_eq_boolean_literal() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, eq: Booleans.a == true }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,false - 
1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,false - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,false - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,false + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,false + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -199,13 +199,13 @@ async fn test_eq_boolean_literal() { async fn test_eq_boolean_literal_null() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, eq: Booleans.a == null }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -213,13 +213,13 @@ async fn test_eq_boolean_literal_null() { async fn test_eq_boolean_literal_converse() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, eq: true == Booleans.a }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,false - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,false - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 
1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,false + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,false + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -227,13 +227,13 @@ async fn test_eq_boolean_literal_converse() { async fn test_eq_boolean_literal_null_converse() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, eq: null == Booleans.a }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -241,12 +241,12 @@ async fn test_eq_boolean_literal_null_converse() { async fn test_eq_string_literal() { insta::assert_snapshot!(QueryFixture::new("let eq = Strings.s == \"hello world\" in { s: Strings.s, eq, is_valid: is_valid(eq)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,eq,is_valid - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,false,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,false,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,true,true - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,false,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,false,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,false,true + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,false,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,false,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,true,true + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,false,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,false,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,false,true "###); } @@ -254,12 +254,12 @@ async fn test_eq_string_literal() { async fn test_eq_string_literal_converse() { insta::assert_snapshot!(QueryFixture::new("let eq = \"hello world\" == Strings.s in { s: Strings.s, eq, is_valid: is_valid(eq)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" 
_time,_subsort,_key_hash,_key,s,eq,is_valid - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,false,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,false,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,true,true - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,false,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,false,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,false,true + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,false,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,false,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,true,true + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,false,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,false,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,false,true "###); } @@ -267,12 +267,12 @@ async fn test_eq_string_literal_converse() { async fn test_eq_string_literal_null() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, eq: Strings.s == null }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye, "###); } @@ -280,12 +280,12 @@ async fn test_eq_string_literal_null() { async fn test_eq_string_literal_null_converse() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, eq: null == Strings.s }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,eq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 
1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye, "###); } @@ -307,12 +307,12 @@ async fn test_eq_record_literal() { async fn test_neq_i64_old() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, neq: Numbers.m <> Numbers.n }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -320,12 +320,12 @@ async fn test_neq_i64_old() { async fn test_neq_i64_new() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, neq: Numbers.m != Numbers.n }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -333,12 +333,12 @@ async fn test_neq_i64_new() { async fn test_neq_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, neq: Numbers.m <> Numbers.n}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 
1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -346,12 +346,12 @@ async fn test_neq_f64() { async fn test_neq_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, neq: (Times.m as timestamp_ns) <> (Times.n as timestamp_ns) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,neq - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,true - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,true - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,false - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,true + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,true + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,true + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,false + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,true "###); } @@ -359,13 +359,13 @@ async fn test_neq_timestamp_ns() { async fn test_neq_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, b: Booleans.b, neq: Booleans.a <> Booleans.b}").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,b,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true,false - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,false,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,true, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,false,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,true,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,, - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true,false + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,true, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,, + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,,, "###); } @@ -373,12 +373,12 @@ async fn test_neq_boolean() { async fn test_neq_string() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, t: Strings.t, neq: Strings.s <> 
Strings.t}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,t,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,hEllo,false - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,world,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,hello world,false - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,greetings,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,salutations,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,,true + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo,false + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,world,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,hello world,false + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,greetings,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,salutations,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,,true "###); } @@ -400,12 +400,12 @@ async fn test_neq_record() { async fn test_neq_i64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, neq: Numbers.m <> 5 }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -413,12 +413,12 @@ async fn test_neq_i64_literal() { async fn test_neq_i64_literal_converse() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, neq: 5 != Numbers.m }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,true + 
1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -426,12 +426,12 @@ async fn test_neq_i64_literal_converse() { async fn test_neq_i64_literal_null() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, neq: Numbers.m != null }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -439,12 +439,12 @@ async fn test_neq_i64_literal_null() { async fn test_neq_i64_literal_null_converse() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, neq: null != Numbers.m }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -452,12 +452,12 @@ async fn test_neq_i64_literal_null_converse() { async fn test_neq_f64_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, neq: Numbers.m <> 5.2}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,false - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 
1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,false + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -468,12 +468,12 @@ async fn test_neq_timestamp_ns_literal() { let literal = \"1970-01-01T00:00:00.000000008Z\" in { m_time, literal_time: literal as timestamp_ns, neq: m_time <> literal}").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m_time,literal_time,neq - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000008,true - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000008,true - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000008, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,1970-01-01T00:00:00.000000008, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000008,false - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000008,true + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1970-01-01T00:00:00.000000004,1970-01-01T00:00:00.000000008,true + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000003,1970-01-01T00:00:00.000000008,true + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000008, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,1970-01-01T00:00:00.000000008, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000008,1970-01-01T00:00:00.000000008,false + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1970-01-01T00:00:00.000000011,1970-01-01T00:00:00.000000008,true "###); } @@ -481,13 +481,13 @@ async fn test_neq_timestamp_ns_literal() { async fn test_neq_boolean_literal() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, neq: Booleans.a <> true }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,false - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 
1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -495,13 +495,13 @@ async fn test_neq_boolean_literal() { async fn test_neq_boolean_literal_converse() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, neq: true != Booleans.a }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,false - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,false - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,false + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -509,13 +509,13 @@ async fn test_neq_boolean_literal_converse() { async fn test_neq_boolean_literal_null() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, neq: Booleans.a <> null }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -523,13 +523,13 @@ async fn test_neq_boolean_literal_null() { async fn test_neq_boolean_literal_null_converse() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, neq: null != Booleans.a }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,neq - 
1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -537,12 +537,12 @@ async fn test_neq_boolean_literal_null_converse() { async fn test_neq_string_literal() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, neq: Strings.s <> \"hello world\" }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,false - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,true + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,false + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,true "###); } @@ -550,12 +550,12 @@ async fn test_neq_string_literal() { async fn test_neq_string_literal_converse() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, neq: \"hello world\" != Strings.s }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,false - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,true + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,false + 
1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,true "###); } @@ -563,12 +563,12 @@ async fn test_neq_string_literal_converse() { async fn test_neq_string_literal_null() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, neq: Strings.s <> null }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye, "###); } @@ -576,12 +576,12 @@ async fn test_neq_string_literal_null() { async fn test_neq_string_literal_null_converse() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, neq: null != Strings.s }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,neq - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye, "###); } diff --git a/crates/sparrow-main/tests/e2e/fixture/query_fixture.rs b/crates/sparrow-main/tests/e2e/fixture/query_fixture.rs index e65ff9bb9..3b57daf34 100644 --- a/crates/sparrow-main/tests/e2e/fixture/query_fixture.rs +++ b/crates/sparrow-main/tests/e2e/fixture/query_fixture.rs @@ -270,7 +270,6 @@ impl QueryFixture { plan: Some(plan), destination: Some(output_to), tables: data.tables(), - ..self.execute_request.clone() }; diff --git a/crates/sparrow-main/tests/e2e/formula_tests.rs b/crates/sparrow-main/tests/e2e/formula_tests.rs index 652775531..f515feedb 100644 --- a/crates/sparrow-main/tests/e2e/formula_tests.rs +++ b/crates/sparrow-main/tests/e2e/formula_tests.rs @@ -7,12 +7,12 @@ async fn test_formulas_out_of_order() { 
.with_formula("n_plus_m", "numbers_m + Numbers.n") .with_formula("numbers_m", "Numbers.m").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,add - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,15 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,27 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,23 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,15 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,27 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,23 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -103,11 +103,11 @@ async fn test_unused_formula_does_not_report_query_error() { insta::assert_snapshot!(QueryFixture::new("{ n: Numbers.n } ") .with_formula("Invalid", "Numbers.n + $$").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } diff --git a/crates/sparrow-main/tests/e2e/general_tests.rs b/crates/sparrow-main/tests/e2e/general_tests.rs index 6e51dfc7a..93c60b075 100644 --- a/crates/sparrow-main/tests/e2e/general_tests.rs +++ b/crates/sparrow-main/tests/e2e/general_tests.rs @@ -11,12 +11,12 @@ use crate::QueryFixture; async fn test_is_valid_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, is_valid: is_valid(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,is_valid - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,false - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,true + 
1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,false + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,false "###); } @@ -24,12 +24,12 @@ async fn test_is_valid_i64() { async fn test_is_valid_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, is_valid: is_valid(Numbers.m)}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,is_valid - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,false - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,false + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,false "###); } @@ -37,12 +37,12 @@ async fn test_is_valid_f64() { async fn test_is_valid_string() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, is_valid: is_valid(Strings.s)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,is_valid - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,true - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,true + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,true + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,true "###); } @@ -50,13 +50,13 @@ async fn test_is_valid_string() { async fn test_is_valid_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, is_valid: is_valid(Booleans.a)}").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,is_valid - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,false - 
1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,false + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,,false "###); } @@ -64,12 +64,12 @@ async fn test_is_valid_boolean() { async fn test_is_valid_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ n: Times.n, is_valid: is_valid(Times.n)}").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,is_valid - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2,true - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,true - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5,true - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,false - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,true - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23,true + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2,true + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,true + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5,true + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,false + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,true + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23,true "###); } @@ -77,12 +77,12 @@ async fn test_is_valid_timestamp_ns() { async fn test_is_valid_record() { insta::assert_snapshot!(QueryFixture::new("{ is_valid: is_valid(Times)}").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,is_valid - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,true - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,true - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,true - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,true + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,true + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,true + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,true + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,true "###); } @@ -90,12 +90,12 @@ async fn test_is_valid_record() { async fn test_hash_i64() { 
insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, hash: hash(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,hash - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10021492687541564645 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,9175685813237050681 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,650022633471272026 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,11832085162654999889 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,17018031324644251917 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,11832085162654999889 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,16461383214845928621 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,11274228027825807126 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,322098188319043992 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,2287927947190353380 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,0 "###); } @@ -103,12 +103,25 @@ async fn test_hash_i64() { async fn test_hash_string() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, hash: hash(Strings.s)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,hash - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,7780135985717684634 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,14176767044433571390 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,5434496457578885363 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,8429509363638065888 - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,8429509363638065888 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,4935722505451517653 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,7011413575603941612 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,13226470954278774291 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,10229417672155185436 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,5663277146615294718 + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,5663277146615294718 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,12405021407607093536 + "###); +} + +#[tokio::test] +async fn test_hash_struct() { + insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, hash: hash({m: Numbers.m})}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,m,hash + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,328624516884178922 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,11318067407944751383 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,11917632967804650977 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,10866357751204891869 + 
1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,0 "###); } @@ -116,13 +129,13 @@ async fn test_hash_string() { async fn test_hash_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, hash: hash(Booleans.a)}").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,hash - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,2359047937476779835 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,14253486467890685049 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,11832085162654999889 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,2359047937476779835 - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,14253486467890685049 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,14253486467890685049 - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,,11832085162654999889 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,18433805721903975440 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,11832085162654999889 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,0 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,18433805721903975440 + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,11832085162654999889 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,11832085162654999889 + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,,0 "###); } @@ -130,12 +143,12 @@ async fn test_hash_boolean() { async fn test_hash_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ n: Times.n, hash: hash(Times.n)}").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,hash - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2,1575016611515860288 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,11820145550582457114 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5,10021492687541564645 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,11832085162654999889 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,4864632034659211723 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23,6336016281945450652 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2694864431690786590 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,17062639839782733832 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5,16461383214845928621 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,0 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,6794973171266502674 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23,5700754754056540783 "###); } @@ -150,7 +163,7 @@ async fn test_hash_record() { async fn test_basic_limit_rows_to_1() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, is_valid: is_valid(Numbers.m)}").with_preview_rows(1).run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,is_valid - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,true + 
1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,true "###); } @@ -160,12 +173,12 @@ async fn test_basic_limit_rows_to_1() { async fn test_basic_limit_rows_all() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, is_valid: is_valid(Numbers.m)}").with_preview_rows(100).run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,is_valid - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,false - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,false + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,false "###); } @@ -175,7 +188,7 @@ async fn test_constant_evaluation_preserves_types() { // equate null and booleans. insta::assert_snapshot!(QueryFixture::new("{ m1: if(false, Numbers.m), m2: Numbers.m }").with_final_results().run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m1,m2 - 1996-12-20T00:40:02.000000001,18446744073709551615,3650215962958587783,A,, - 1996-12-20T00:40:02.000000001,18446744073709551615,11753611437813598533,B,,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,2867199309159137213,B,,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,12960666915911099378,A,, "###); } diff --git a/crates/sparrow-main/tests/e2e/json_tests.rs b/crates/sparrow-main/tests/e2e/json_tests.rs index 302b948f7..de50ff09e 100644 --- a/crates/sparrow-main/tests/e2e/json_tests.rs +++ b/crates/sparrow-main/tests/e2e/json_tests.rs @@ -68,12 +68,12 @@ pub(crate) async fn invalid_json_data_fixture() -> DataFixture { async fn test_json_parses_field() { insta::assert_snapshot!(QueryFixture::new("let json = json(Json.json) in { a_test: json.a as i64, b_test: json(Json.json).b }").run_to_csv(&json_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a_test,b_test - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10,dog - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,lizard - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,12,cat - 1996-12-20T00:43:57.000000000,9223372036854775808,3650215962958587783,A,34, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,6,dog + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10,dog + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,lizard + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,12,cat + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,34, + 
1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,6,dog "###) } @@ -81,12 +81,12 @@ async fn test_json_parses_field() { async fn test_json_string_field_usable_in_string_functions() { insta::assert_snapshot!(QueryFixture::new("let json = json(Json.json) in { string: json.b, len: len(json.b) }").run_to_csv(&json_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,string,len - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,dog,3 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,lizard,6 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,cat,3 - 1996-12-20T00:43:57.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,dog,3 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,dog,3 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,lizard,6 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,cat,3 + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,dog,3 "###) } @@ -94,12 +94,12 @@ async fn test_json_string_field_usable_in_string_functions() { async fn test_json_field_number_as_string() { insta::assert_snapshot!(QueryFixture::new("let json = json(Json.json) in { num_as_str: json.a as string, len: len(json.a as string) }").run_to_csv(&json_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,num_as_str,len - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10,2 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,1 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1,1 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,12,2 - 1996-12-20T00:43:57.000000000,9223372036854775808,3650215962958587783,A,34,2 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,6,1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10,2 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,1 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1,1 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,12,2 + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,34,2 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,6,1 "###) } @@ -107,12 +107,12 @@ async fn test_json_field_number_as_string() { async fn test_json_field_as_number_with_addition() { insta::assert_snapshot!(QueryFixture::new("let json = json(Json.json) in { a: json.a, plus_one: (json.a as i64) + 1 }").run_to_csv(&json_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,plus_one - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10,11 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,5 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1,2 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,12,13 - 1996-12-20T00:43:57.000000000,9223372036854775808,3650215962958587783,A,34,35 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,6,7 + 
1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10,11 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,5 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1,2 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,12,13 + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,34,35 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,6,7 "###) } @@ -123,12 +123,12 @@ async fn test_incorrect_json_format_produces_null() { // print "null" in other fields. insta::assert_snapshot!(QueryFixture::new("let json = json(Json.json) in { a_test: json.a as i64, b_test: json(Json.json).b }").run_to_csv(&invalid_json_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a_test,b_test - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,12,cat - 1996-12-20T00:43:57.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,6,dog + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,12,cat + 1996-12-20T00:43:57.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,6,dog "###) } diff --git a/crates/sparrow-main/tests/e2e/list_tests.rs b/crates/sparrow-main/tests/e2e/list_tests.rs new file mode 100644 index 000000000..692015357 --- /dev/null +++ b/crates/sparrow-main/tests/e2e/list_tests.rs @@ -0,0 +1,258 @@ +//! e2e tests for list types + +use sparrow_api::kaskada::v1alpha::TableConfig; +use uuid::Uuid; + +use crate::{fixture::DataFixture, QueryFixture}; + +/// Create a simple table with a collection type (list).
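+/// Reads `parquet/data_with_list.parquet` into a table named `Input`,
+/// keyed by the `key` column; the tests below exercise its `i64_list`,
+/// `string_list`, and `bool_list` columns.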
+pub(crate) async fn list_data_fixture() -> DataFixture { + DataFixture::new() + .with_table_from_files( + TableConfig::new_with_table_source( + "Input", + &Uuid::new_v4(), + "time", + Some("subsort"), + "key", + "", + ), + &["parquet/data_with_list.parquet"], + ) + .await + .unwrap() +} + +#[tokio::test] +async fn test_index_list_i64_static() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Input.i64_list | index(1) }").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,2 + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,2 + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1,2 + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,2 + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,2 + "###); +} + +#[tokio::test] +async fn test_index_list_i64_dynamic() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Input.i64_list | index(Input.index) }").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,1 + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,3 + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1,2 + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,3 + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,1 + "###); +} + +#[tokio::test] +async fn test_index_list_string_static() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Input.string_list | index(1) }").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,bird + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,bird + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1, + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,cat + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1, + "###); +} + +#[tokio::test] +async fn test_index_list_string_dynamic() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Input.string_list | index(Input.index) }").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,dog + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,fish + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1, + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1, + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,dog + "###); +} + +#[tokio::test] +async fn test_index_list_bool_static() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Input.bool_list | index(1) }").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,true + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,false + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1,false + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,false + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1, + "###); +} + +#[tokio::test] +async fn test_index_list_bool_dynamic() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Input.bool_list | index(Input.index) }").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,false + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1, + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1,false + 
1996-12-19T16:41:57.000000000,0,18433805721903975440,1,true + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,true + "###); +} + +#[tokio::test] +#[ignore = "https://docs.rs/arrow-ord/44.0.0/src/arrow_ord/comparison.rs.html#1746"] +async fn test_list_equality() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Input.bool_list | first() == Input.bool_list }").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + "###); +} + +#[tokio::test] +async fn test_first_list() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Input.string_list | first() | index(0) }").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,dog + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,dog + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1,dog + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,dog + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,dog + "###); +} + +#[tokio::test] +async fn test_last_list() { + insta::assert_snapshot!(QueryFixture::new("{ f1: Input.string_list | last() | index(0) }").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,f1 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,dog + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,cat + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1, + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,dog + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,dog + "###); +} + +#[tokio::test] +async fn test_list_len() { + insta::assert_snapshot!(QueryFixture::new("{ + len_struct: { s: Input.string_list } | collect(max=null) | list_len(), + len_num: Input.i64_list | list_len(), + len_str: Input.string_list | list_len(), + len_bool: Input.bool_list | list_len(), + } + ").run_to_csv(&list_data_fixture().await).await.unwrap(), @r###" + _time,_subsort,_key_hash,_key,len_struct,len_num,len_str,len_bool + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,1,3,2,2 + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,2,3,3,2 + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1,3,3,0,3 + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,4,3,2,3 + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,5,3,1,1 + "###); +} + +#[tokio::test] +async fn test_list_schemas_are_compatible() { + // This query puts a collect() into a record, which + // does schema validation when constructing the struct array. 
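+    // Collecting the first element of each input list produces a second
+    // list<string> column alongside the raw `string_list`; hashing the
+    // Parquet output verifies the schemas agree without a large inline snapshot.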
+ let hash = QueryFixture::new( + " + let s_list = Input.string_list + let first_elem = s_list | index(0) + let list_with_first_elems = first_elem | collect(max = null) + in { l: Input.string_list, list_with_first_elems } + ", + ) + .run_to_parquet_hash(&list_data_fixture().await) + .await + .unwrap(); + + assert_eq!( + "C08A8D8F24F79BFAD10822D39F5734B1BCD35B73EC5C4A7AF3A90A5F", + hash + ); +} + +#[tokio::test] +async fn test_using_list_in_get_fails() { + insta::assert_yaml_snapshot!(QueryFixture::new("{ f1: Input.i64_list | get(\"s\") }") + .run_to_csv(&list_data_fixture().await).await.unwrap_err(), @r###" + --- + code: Client specified an invalid argument + message: 1 errors in Fenl statements; see diagnostics + fenl_diagnostics: + - severity: error + code: E0010 + message: Invalid argument type(s) + formatted: + - "error[E0010]: Invalid argument type(s)" + - " --> Query:1:24" + - " |" + - "1 | { f1: Input.i64_list | get(\"s\") }" + - " | ^^^ Invalid types for parameter 'map' in call to 'get'" + - " |" + - " --> internal:1:1" + - " |" + - 1 | $input + - " | ------ Actual type: list" + - " |" + - " --> built-in signature 'get(key: K, map: map) -> V':1:34" + - " |" + - "1 | get(key: K, map: map) -> V" + - " | --------- Expected type: map" + - "" + - "" + "###); +} + +#[tokio::test] +async fn test_incorrect_index_type() { + insta::assert_yaml_snapshot!(QueryFixture::new("{ f1: Input.i64_list | index(\"s\") }") + .run_to_csv(&list_data_fixture().await).await.unwrap_err(), @r###" + --- + code: Client specified an invalid argument + message: 1 errors in Fenl statements; see diagnostics + fenl_diagnostics: + - severity: error + code: E0010 + message: Invalid argument type(s) + formatted: + - "error[E0010]: Invalid argument type(s)" + - " --> Query:1:24" + - " |" + - "1 | { f1: Input.i64_list | index(\"s\") }" + - " | ^^^^^ --- Actual type: string" + - " | | " + - " | Invalid types for parameter 'i' in call to 'index'" + - " |" + - " --> built-in signature 'index(i: i64, list: list) -> T':1:18" + - " |" + - "1 | index(i: i64, list: list) -> T" + - " | --- Expected type: i64" + - "" + - "" + "###); +} + +#[tokio::test] +async fn test_incorrect_index_type_field() { + insta::assert_yaml_snapshot!(QueryFixture::new("{ f1: Input.i64_list | index(Input.bool_list) }") + .run_to_csv(&list_data_fixture().await).await.unwrap_err(), @r###" + --- + code: Client specified an invalid argument + message: 1 errors in Fenl statements; see diagnostics + fenl_diagnostics: + - severity: error + code: E0010 + message: Invalid argument type(s) + formatted: + - "error[E0010]: Invalid argument type(s)" + - " --> Query:1:24" + - " |" + - "1 | { f1: Input.i64_list | index(Input.bool_list) }" + - " | ^^^^^ --------------- Actual type: list" + - " | | " + - " | Invalid types for parameter 'i' in call to 'index'" + - " |" + - " --> built-in signature 'index(i: i64, list: list) -> T':1:18" + - " |" + - "1 | index(i: i64, list: list) -> T" + - " | --- Expected type: i64" + - "" + - "" + "###); +} diff --git a/crates/sparrow-main/tests/e2e/logical_tests.rs b/crates/sparrow-main/tests/e2e/logical_tests.rs index 9a92fa0d9..e59e2f7b5 100644 --- a/crates/sparrow-main/tests/e2e/logical_tests.rs +++ b/crates/sparrow-main/tests/e2e/logical_tests.rs @@ -10,13 +10,13 @@ use crate::QueryFixture; async fn test_not_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, not_a: !Booleans.a }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,not_a - 
1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,false - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,false - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,true - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,false + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,true + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -24,13 +24,13 @@ async fn test_not_boolean() { async fn test_logical_or_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, b: Booleans.b, logical_or: Booleans.a or Booleans.b }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,b,logical_or - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,false,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,true,true - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,false,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,true,true - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,, - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,true,true + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true,true + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,, + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,,, "###); } @@ -38,13 +38,13 @@ async fn test_logical_or_boolean() { async fn test_logical_and_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, b: Booleans.b, logical_and: Booleans.a and Booleans.b }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,b,logical_and - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,false,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,true, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,false,false - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,true,false - 
1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,,false - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,true, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false,false + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true,false + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,,false + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,,, "###); } @@ -52,13 +52,13 @@ async fn test_logical_and_boolean() { async fn test_if_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, if_bool: Booleans.a | if(Booleans.a) }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,if_bool - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false, - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false, + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -66,12 +66,12 @@ async fn test_if_boolean() { async fn test_if_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, if_i64: Numbers.m | if(Numbers.m == 5) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,if_i64 - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -79,12 +79,12 @@ async fn test_if_i64() { async fn test_if_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: 
Numbers.m, if_f64: Numbers.m | if(Numbers.m == 5.2) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,if_f64 - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -92,12 +92,12 @@ async fn test_if_f64() { async fn test_if_string() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, if_string: Strings.s | if(Strings.s == \"hEllo\") }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,if_string - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,hEllo - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye, "###); } @@ -105,12 +105,12 @@ async fn test_if_string() { async fn test_if_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ t: Times.n, if_ts: Times.n | if(Times.key == \"B\") }").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t,if_ts - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23,23 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8 + 
2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23,23 "###); } @@ -118,12 +118,12 @@ async fn test_if_timestamp_ns() { async fn test_if_record() { insta::assert_snapshot!(QueryFixture::new("{ t: Times.n, if_record: Times | if(Times.key == \"B\") | $input.n }").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t,if_record - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23,23 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23,23 "###); } @@ -131,12 +131,12 @@ async fn test_if_record() { async fn test_if_literal() { insta::assert_snapshot!(QueryFixture::new("{ if_literal: 1 | if(Times.key == \"B\") }").run_to_csv(×tamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,if_literal - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1 "###); } @@ -144,13 +144,13 @@ async fn test_if_literal() { async fn test_null_if_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, null_if_bool: Booleans.a | null_if(Booleans.a) }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,null_if_bool - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,false - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,false - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true, + 
1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,false + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,false + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,, "###); } @@ -158,12 +158,12 @@ async fn test_null_if_boolean() { async fn test_null_if_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, null_if_i64: Numbers.m | null_if(Numbers.m == 5) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,null_if_i64 - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -171,12 +171,12 @@ async fn test_null_if_i64() { async fn test_null_if_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, null_if_f64: Numbers.m | null_if(Numbers.m == 5.2) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,null_if_f64 - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,17.6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,12.4 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,24.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,17.6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,12.4 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -184,12 +184,12 @@ async fn test_null_if_f64() { async fn test_null_if_string() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, null_if_string: Strings.s | null_if(Strings.s == \"hEllo\") }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,null_if_string - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo, - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,World - 
1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,hello world - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,goodbye + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo, + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,World + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,hello world + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,goodbye "###); } @@ -197,12 +197,12 @@ async fn test_null_if_string() { async fn test_null_if_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ t: Times.n, null_if_ts: Times.n | null_if(Times.key == \"B\") }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t,null_if_ts - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23, "###); } @@ -210,12 +210,12 @@ async fn test_null_if_timestamp_ns() { async fn test_null_if_record() { insta::assert_snapshot!(QueryFixture::new("{ n: Times.n, null_if_record: Times | null_if(Times.key == \"B\") | $input.n }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,null_if_record - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23, "###); } @@ -223,12 +223,12 @@ async fn test_null_if_record() { async fn test_null_if_literal() { insta::assert_snapshot!(QueryFixture::new("{ null_if_literal: 1 | 
null_if(Times.key == \"B\") }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,null_if_literal - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B, "###); } @@ -236,13 +236,13 @@ async fn test_null_if_literal() { async fn test_else_boolean() { insta::assert_snapshot!(QueryFixture::new("{ a: Booleans.a, b: Booleans.b, a_else_b: Booleans.a | else(Booleans.b) }").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,a,b,a_else_b - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,true,true - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,false,false,false - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,true,true - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,true,false,true - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,false,true,false - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,false,,false - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,true,true + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,false,false,false + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,true,true + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,true,false,true + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,false,true,false + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,false,,false + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,,, "###); } @@ -250,12 +250,12 @@ async fn test_else_boolean() { async fn test_else_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, m_else_n: Numbers.m | else(Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,m_else_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,24 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,17 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,9 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,5 + 
1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,24 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,17 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,9 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -263,12 +263,12 @@ async fn test_else_i64() { async fn test_else_f64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, m_else_n: Numbers.m | else(Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,m_else_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,24.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,17.6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,12.4 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,24.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,17.6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,12.4 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -276,12 +276,12 @@ async fn test_else_f64() { async fn test_else_string() { insta::assert_snapshot!(QueryFixture::new("{ s: Strings.s, t: Strings.t, s_else_t: Strings.s | else(Strings.t) }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,s,t,s_else_t - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hEllo,hEllo,hEllo - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,World,world,World - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world,hello world,hello world - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,greetings, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,salutations, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye,,goodbye + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hEllo,hEllo,hEllo + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,World,world,World + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world,hello world,hello world + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,greetings, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,salutations, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye,,goodbye "###); } @@ -289,12 +289,12 @@ async fn test_else_string() { async fn test_else_timestamp_ns() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, m_else_n: Times.m | else(Times.n) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,m_else_n - 
1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,4 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,3 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,11 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,4 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,3 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,11 "###); } @@ -302,12 +302,12 @@ async fn test_else_timestamp_ns() { async fn test_else_record() { insta::assert_snapshot!(QueryFixture::new("{ m: Times.m, n: Times.n, times_else_times: Times | else(Times) | $input.n }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,times_else_times - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,4,2,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,3,4,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,5,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11,23,23 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,4,2,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,3,4,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,5,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11,23,23 "###); } @@ -315,12 +315,12 @@ async fn test_else_record() { async fn test_else_literal() { insta::assert_snapshot!(QueryFixture::new("{ n: Times.n, n_else_literal: Times.n | else(12345) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,n_else_literal - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,4,4 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5,5 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,12345 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,8,8 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,23,23 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,4,4 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5,5 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,12345 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,8,8 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,23,23 "###); } @@ -329,8 +329,8 @@ async fn 
test_if_record_nested_is_valid_num_eq() { // Test for https://gitlab.com/kaskada/kaskada/-/issues/342 insta::assert_snapshot!(QueryFixture::new("Times | extend({gr_5: $input.n > 5}) | if(Times.n > 5) | when(is_valid($input.key))").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,gr_5,time,subsort,key,n,m,other_time,fruit - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,true,1998-12-13T00:43:57.000000000,0,B,8,8,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,true,2004-12-06T00:44:57.000000000,0,B,23,11,1994-12-20T00:39:57.000000000,mango + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,true,1998-12-13T00:43:57.000000000,0,B,8,8,, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,true,2004-12-06T00:44:57.000000000,0,B,23,11,1994-12-20T00:39:57.000000000,mango "###); } @@ -338,7 +338,7 @@ async fn test_if_record_nested_is_valid_num_eq() { async fn test_if_record_nested_is_valid_string_eq() { insta::assert_snapshot!(QueryFixture::new("Times | extend({eq_A: len($input.fruit) > 6}) | if (len(Times.fruit) > 6) | when(is_valid($input.key))").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,eq_A,time,subsort,key,n,m,other_time,fruit - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,true,1995-10-20T00:40:57.000000000,0,B,4,3,1994-11-20T00:39:57.000000000,watermelon + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,true,1995-10-20T00:40:57.000000000,0,B,4,3,1994-11-20T00:39:57.000000000,watermelon "###); } @@ -346,12 +346,12 @@ async fn test_if_record_nested_is_valid_string_eq() { async fn test_if_null_condition_number() { insta::assert_snapshot!(QueryFixture::new("Times | if ($input.n > 0) | extend({ cond: Times.n > 0 })").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,cond,time,subsort,key,n,m,other_time,fruit - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,1994-12-20T00:39:57.000000000,0,A,2,4,2003-12-20T00:39:57.000000000,pear - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,true,1995-10-20T00:40:57.000000000,0,B,4,3,1994-11-20T00:39:57.000000000,watermelon - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,true,1996-08-20T00:41:57.000000000,0,B,5,,1998-12-20T00:39:57.000000000,mango - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,true,1998-12-13T00:43:57.000000000,0,B,8,8,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,true,2004-12-06T00:44:57.000000000,0,B,23,11,1994-12-20T00:39:57.000000000,mango + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,1994-12-20T00:39:57.000000000,0,A,2,4,2003-12-20T00:39:57.000000000,pear + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,true,1995-10-20T00:40:57.000000000,0,B,4,3,1994-11-20T00:39:57.000000000,watermelon + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,true,1996-08-20T00:41:57.000000000,0,B,5,,1998-12-20T00:39:57.000000000,mango + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,true,1998-12-13T00:43:57.000000000,0,B,8,8,, + 
2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,true,2004-12-06T00:44:57.000000000,0,B,23,11,1994-12-20T00:39:57.000000000,mango "###); } @@ -359,12 +359,12 @@ async fn test_if_null_condition_number() { async fn test_if_null_condition_string_equality() { insta::assert_snapshot!(QueryFixture::new("Times | if ($input.fruit == \"mango\")").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,n,m,other_time,fruit - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,,,,,,, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,0,B,5,,1998-12-20T00:39:57.000000000,mango - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,0,B,23,11,1994-12-20T00:39:57.000000000,mango + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,,,,,,, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,0,B,5,,1998-12-20T00:39:57.000000000,mango + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,0,B,23,11,1994-12-20T00:39:57.000000000,mango "###); } @@ -372,12 +372,12 @@ async fn test_if_null_condition_string_equality() { async fn test_null_if_null_condition() { insta::assert_snapshot!(QueryFixture::new("Times | null_if ($input.n > 6)").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,n,m,other_time,fruit - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,0,A,2,4,2003-12-20T00:39:57.000000000,pear - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,0,B,4,3,1994-11-20T00:39:57.000000000,watermelon - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,0,B,5,,1998-12-20T00:39:57.000000000,mango - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,0,A,2,4,2003-12-20T00:39:57.000000000,pear + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,0,B,4,3,1994-11-20T00:39:57.000000000,watermelon + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,0,B,5,,1998-12-20T00:39:57.000000000,mango + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, "###); } @@ -386,11 +386,11 @@ async fn test_null_if_condition_null_values() { // Ensure 
that null rows are produced insta::assert_snapshot!(QueryFixture::new("Times | if ($input.n < 0)").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,n,m,other_time,fruit - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,,,,,,, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,,,,,,, + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,,,,,,, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,,,,,,, "###); } diff --git a/crates/sparrow-main/tests/e2e/lookup_tests.rs b/crates/sparrow-main/tests/e2e/lookup_tests.rs index 162dd16aa..086afc081 100644 --- a/crates/sparrow-main/tests/e2e/lookup_tests.rs +++ b/crates/sparrow-main/tests/e2e/lookup_tests.rs @@ -78,12 +78,12 @@ async fn test_lookup_self_i64() { let last_sender_sum_sent = lookup(last(Received.from), sum_sent) in { last_sender, last_sender_sum_sent }").run_to_csv(&lookup_account_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,last_sender,last_sender_sum_sent - 1996-12-20T00:39:57.000000000,9223372036854775808,1575016611515860288,2,0,50 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,0,61 - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,2,25 - 1998-12-20T00:39:57.000000000,9223372036854775811,2359047937476779835,1,0,86 - 1999-12-20T00:39:58.000000000,9223372036854775812,2359047937476779835,1,0,98 - 1999-12-20T00:39:58.000000000,9223372036854775813,2359047937476779835,1,0,98 + 1996-12-20T00:39:57.000000000,9223372036854775808,2694864431690786590,2,0,50 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,0,61 + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,2,25 + 1998-12-20T00:39:57.000000000,9223372036854775811,18433805721903975440,1,0,86 + 1999-12-20T00:39:58.000000000,9223372036854775812,18433805721903975440,1,0,98 + 1999-12-20T00:39:58.000000000,9223372036854775813,18433805721903975440,1,0,98 "###); } @@ -94,17 +94,17 @@ async fn test_lookup_self_i64_with_merge_interpolation() { let last_sender_sum_sent = lookup(last(Received.from), sum_sent) in { sum_sent, last_sender, last_sender_sum_sent }").run_to_csv(&lookup_account_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_sent,last_sender,last_sender_sum_sent - 1996-12-20T00:39:57.000000000,9223372036854775808,1575016611515860288,2,,0,50 - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,50,, - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,61,0,61 - 1997-12-20T00:39:58.000000000,9223372036854775810,1575016611515860288,2,25,0,50 - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,61,2,25 - 1998-12-20T00:39:57.000000000,9223372036854775811,2359047937476779835,1,,0,86 
- 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,86,2,25 - 1999-12-20T00:39:58.000000000,9223372036854775812,2359047937476779835,1,,0,98 - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,98,2,25 - 1999-12-20T00:39:58.000000000,9223372036854775813,2359047937476779835,1,,0,98 - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,98,2,25 + 1996-12-20T00:39:57.000000000,9223372036854775808,2694864431690786590,2,,0,50 + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,50,, + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,61,0,61 + 1997-12-20T00:39:58.000000000,9223372036854775810,2694864431690786590,2,25,0,50 + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,61,2,25 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,86,2,25 + 1998-12-20T00:39:57.000000000,9223372036854775811,18433805721903975440,1,,0,86 + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,98,2,25 + 1999-12-20T00:39:58.000000000,9223372036854775812,18433805721903975440,1,,0,98 + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,98,2,25 + 1999-12-20T00:39:58.000000000,9223372036854775813,18433805721903975440,1,,0,98 "###); } @@ -114,12 +114,12 @@ async fn test_lookup_self_string() { let last_sender_description = lookup(last_sender, last(Sent.description)) in { description: Received.description, last_sender, last_sender_description }").run_to_csv(&lookup_account_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,description,last_sender,last_sender_description - 1996-12-20T00:39:57.000000000,9223372036854775808,1575016611515860288,2,food,0,food - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,gas,0,gas - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,food,2,food - 1998-12-20T00:39:57.000000000,9223372036854775811,2359047937476779835,1,gas,0,gas - 1999-12-20T00:39:58.000000000,9223372036854775812,2359047937476779835,1,MOVIe,0,MOVIe - 1999-12-20T00:39:58.000000000,9223372036854775813,2359047937476779835,1,null_amount,0,null_amount + 1996-12-20T00:39:57.000000000,9223372036854775808,2694864431690786590,2,food,0,food + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,gas,0,gas + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,food,2,food + 1998-12-20T00:39:57.000000000,9223372036854775811,18433805721903975440,1,gas,0,gas + 1999-12-20T00:39:58.000000000,9223372036854775812,18433805721903975440,1,MOVIe,0,MOVIe + 1999-12-20T00:39:58.000000000,9223372036854775813,18433805721903975440,1,null_amount,0,null_amount "###); } @@ -129,17 +129,17 @@ async fn test_lookup_self_record() { let last_sender_sent = lookup(last(Received.to), Sent.description) in Sent | extend({ received_description: Received.description, last_sender, last_sender_sent })").run_to_csv(&lookup_account_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,received_description,last_sender,last_sender_sent,from,to,time,subsort,amount,description,order_time,code - 1996-12-20T00:39:57.000000000,9223372036854775808,1575016611515860288,2,food,0,,,,,,,,, - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,,,,0,2,1996-12-20T00:39:57.000000000,0,50,food,2005-12-19T16:39:57-08:00,5 - 
1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,gas,0,gas,0,0,1997-12-20T00:39:57.000000000,1,11,gas,2001-12-19T16:39:57-08:00,6 - 1997-12-20T00:39:58.000000000,9223372036854775810,1575016611515860288,2,,0,,2,0,1997-12-20T00:39:58.000000000,2,25,food,2001-12-19T16:39:57-08:00,5 - 1997-12-20T00:39:58.000000000,9223372036854775810,14253486467890685049,0,food,2,,,,,,,,, - 1998-12-20T00:39:57.000000000,9223372036854775811,2359047937476779835,1,gas,0,,,,,,,,, - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,,2,,0,1,1998-12-20T00:39:57.000000000,3,25,gas,2003-12-19T16:39:57-08:00,6 - 1999-12-20T00:39:58.000000000,9223372036854775812,2359047937476779835,1,MOVIe,0,,,,,,,,, - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,,2,,0,1,1999-12-20T00:39:58.000000000,4,12,MOVIe,2004-12-1,7 - 1999-12-20T00:39:58.000000000,9223372036854775813,2359047937476779835,1,null_amount,0,,,,,,,,, - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,,2,,0,1,1999-12-20T00:39:58.000000000,5,,null_amount,2005-12-1, + 1996-12-20T00:39:57.000000000,9223372036854775808,2694864431690786590,2,food,0,,,,,,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,,,,0,2,1996-12-20T00:39:57.000000000,0,50,food,2005-12-19T16:39:57-08:00,5 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,gas,0,gas,0,0,1997-12-20T00:39:57.000000000,1,11,gas,2001-12-19T16:39:57-08:00,6 + 1997-12-20T00:39:58.000000000,9223372036854775810,2694864431690786590,2,,0,,2,0,1997-12-20T00:39:58.000000000,2,25,food,2001-12-19T16:39:57-08:00,5 + 1997-12-20T00:39:58.000000000,9223372036854775810,11832085162654999889,0,food,2,,,,,,,,, + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,,2,,0,1,1998-12-20T00:39:57.000000000,3,25,gas,2003-12-19T16:39:57-08:00,6 + 1998-12-20T00:39:57.000000000,9223372036854775811,18433805721903975440,1,gas,0,,,,,,,,, + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,,2,,0,1,1999-12-20T00:39:58.000000000,4,12,MOVIe,2004-12-1,7 + 1999-12-20T00:39:58.000000000,9223372036854775812,18433805721903975440,1,MOVIe,0,,,,,,,,, + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,,2,,0,1,1999-12-20T00:39:58.000000000,5,,null_amount,2005-12-1, + 1999-12-20T00:39:58.000000000,9223372036854775813,18433805721903975440,1,null_amount,0,,,,,,,,, "###); } @@ -148,12 +148,12 @@ async fn test_lookup_code_name() { insta::assert_snapshot!(QueryFixture::new("{ code: Sent.code, code_name: lookup(Sent.code, CodeName.name | last()) }").run_to_csv(&lookup_account_data_fixture().await).await .unwrap(), @r###" _time,_subsort,_key_hash,_key,code,code_name - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,5,FiveA - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,6,Six - 1997-12-20T00:39:58.000000000,9223372036854775810,1575016611515860288,2,5,FiveB - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,6,Six - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,7, - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,5,FiveA + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,6,Six + 1997-12-20T00:39:58.000000000,9223372036854775810,2694864431690786590,2,5,FiveB + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,6,Six + 
1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,7, + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,, "###); } @@ -164,12 +164,12 @@ async fn test_lookup_code_name_wacky_unused() { { code: Sent.code, code_name: lookup(Sent.code, CodeName.name | last()) }").run_to_csv(&lookup_account_data_fixture().await).await .unwrap(), @r###" _time,_subsort,_key_hash,_key,code,code_name - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,5,FiveA - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,6,Six - 1997-12-20T00:39:58.000000000,9223372036854775810,1575016611515860288,2,5,FiveB - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,6,Six - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,7, - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,5,FiveA + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,6,Six + 1997-12-20T00:39:58.000000000,9223372036854775810,2694864431690786590,2,5,FiveB + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,6,Six + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,7, + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,, "###); } @@ -327,12 +327,12 @@ async fn test_lookup_with_key() { .run_to_csv(&lookup_account_data_fixture().await).await .unwrap(), @r###" _time,_subsort,_key_hash,_key,code_name - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,1 - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0, - 1997-12-20T00:39:58.000000000,9223372036854775810,1575016611515860288,2,1 - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0, - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0, - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0, + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,1 + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0, + 1997-12-20T00:39:58.000000000,9223372036854775810,2694864431690786590,2,1 + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0, + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0, + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0, "###); } @@ -450,12 +450,12 @@ async fn test_lookup_only_includes_primary_entites() { // there should only be 2 entities in the rows. 
insta::assert_snapshot!(QueryFixture::new("{ description: lookup(last(Sent.to), Received.description) }").run_to_csv(&lookup_account_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,description - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,food - 1997-12-20T00:39:57.000000000,9223372036854775809,14253486467890685049,0,gas - 1997-12-20T00:39:58.000000000,9223372036854775810,1575016611515860288,2,food - 1998-12-20T00:39:57.000000000,9223372036854775811,14253486467890685049,0,gas - 1999-12-20T00:39:58.000000000,9223372036854775812,14253486467890685049,0,MOVIe - 1999-12-20T00:39:58.000000000,9223372036854775813,14253486467890685049,0,null_amount + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,food + 1997-12-20T00:39:57.000000000,9223372036854775809,11832085162654999889,0,gas + 1997-12-20T00:39:58.000000000,9223372036854775810,2694864431690786590,2,food + 1998-12-20T00:39:57.000000000,9223372036854775811,11832085162654999889,0,gas + 1999-12-20T00:39:58.000000000,9223372036854775812,11832085162654999889,0,MOVIe + 1999-12-20T00:39:58.000000000,9223372036854775813,11832085162654999889,0,null_amount "###); } @@ -463,7 +463,7 @@ async fn test_lookup_only_includes_primary_entites() { async fn test_lookup_only_includes_primary_entites_final_results() { insta::assert_snapshot!(QueryFixture::new("{ description: lookup(last(Sent.to), Received.description) }").with_final_results().run_to_csv(&lookup_account_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,description - 1999-12-20T00:39:58.000000001,18446744073709551615,1575016611515860288,2,food - 1999-12-20T00:39:58.000000001,18446744073709551615,14253486467890685049,0,null_amount + 1999-12-20T00:39:58.000000001,18446744073709551615,2694864431690786590,2,food + 1999-12-20T00:39:58.000000001,18446744073709551615,11832085162654999889,0,null_amount "###); } diff --git a/crates/sparrow-main/tests/e2e/main.rs b/crates/sparrow-main/tests/e2e/main.rs index c629a920e..97f25c56a 100644 --- a/crates/sparrow-main/tests/e2e/main.rs +++ b/crates/sparrow-main/tests/e2e/main.rs @@ -19,6 +19,7 @@ mod aggregation_tests; mod basic_error_tests; mod cast_tests; mod coalesce_tests; +mod collect_tests; mod comparison_tests; mod decoration_tests; mod entity_key_output_tests; @@ -26,6 +27,7 @@ mod equality_tests; mod formula_tests; mod general_tests; mod json_tests; +mod list_tests; mod logical_tests; mod lookup_tests; mod map_tests; diff --git a/crates/sparrow-main/tests/e2e/map_tests.rs b/crates/sparrow-main/tests/e2e/map_tests.rs index 51b516ea1..5b21dfdc6 100644 --- a/crates/sparrow-main/tests/e2e/map_tests.rs +++ b/crates/sparrow-main/tests/e2e/map_tests.rs @@ -27,11 +27,11 @@ pub(crate) async fn map_data_fixture() -> DataFixture { async fn test_string_to_i64_get_static_key() { insta::assert_snapshot!(QueryFixture::new("{ f1: get(\"f1\", Input.s_to_i64) }").run_to_csv(&map_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,f1 - 1996-12-19T16:39:57.000000000,0,2359047937476779835,1,0 - 1996-12-19T16:40:57.000000000,0,2359047937476779835,1,1 - 1996-12-19T16:40:59.000000000,0,2359047937476779835,1,5 - 1996-12-19T16:41:57.000000000,0,2359047937476779835,1, - 1996-12-19T16:42:57.000000000,0,2359047937476779835,1,15 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,0 + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,1 + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1,5 + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1, + 
1996-12-19T16:42:57.000000000,0,18433805721903975440,1,15 "###); } @@ -39,11 +39,11 @@ async fn test_string_to_i64_get_static_key() { async fn test_string_to_i64_get_static_key_second_field() { insta::assert_snapshot!(QueryFixture::new("{ f2: Input.s_to_i64 | get(\"f2\") }").run_to_csv(&map_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,f2 - 1996-12-19T16:39:57.000000000,0,2359047937476779835,1,22 - 1996-12-19T16:40:57.000000000,0,2359047937476779835,1,10 - 1996-12-19T16:40:59.000000000,0,2359047937476779835,1,3 - 1996-12-19T16:41:57.000000000,0,2359047937476779835,1,13 - 1996-12-19T16:42:57.000000000,0,2359047937476779835,1, + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,22 + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,10 + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1,3 + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,13 + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1, "###); } @@ -51,11 +51,11 @@ async fn test_string_to_i64_get_static_key_second_field() { async fn test_string_to_i64_get_dynamic_key() { insta::assert_snapshot!(QueryFixture::new("{ value: Input.s_to_i64 | get(Input.s_to_i64_key) }").run_to_csv(&map_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,value - 1996-12-19T16:39:57.000000000,0,2359047937476779835,1,0 - 1996-12-19T16:40:57.000000000,0,2359047937476779835,1,10 - 1996-12-19T16:40:59.000000000,0,2359047937476779835,1, - 1996-12-19T16:41:57.000000000,0,2359047937476779835,1,13 - 1996-12-19T16:42:57.000000000,0,2359047937476779835,1,11 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,0 + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,10 + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1, + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,13 + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,11 "###); } @@ -63,11 +63,11 @@ async fn test_string_to_i64_get_dynamic_key() { async fn test_i64_to_i64_get_static_key() { insta::assert_snapshot!(QueryFixture::new("{ f1: get(1, Input.i64_to_i64) }").run_to_csv(&map_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,f1 - 1996-12-19T16:39:57.000000000,0,2359047937476779835,1,1 - 1996-12-19T16:40:57.000000000,0,2359047937476779835,1,2 - 1996-12-19T16:40:59.000000000,0,2359047937476779835,1, - 1996-12-19T16:41:57.000000000,0,2359047937476779835,1, - 1996-12-19T16:42:57.000000000,0,2359047937476779835,1,10 + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,1 + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,2 + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1, + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1, + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,10 "###); } @@ -76,11 +76,11 @@ async fn test_u64_to_str_get_static_key() { // Ideally we don't have to specify `as u64`. 
See https://github.com/kaskada-ai/kaskada/issues/534 insta::assert_snapshot!(QueryFixture::new("{ f1: get(4 as u64, Input.u64_to_s) }").run_to_csv(&map_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,f1 - 1996-12-19T16:39:57.000000000,0,2359047937476779835,1, - 1996-12-19T16:40:57.000000000,0,2359047937476779835,1,cat - 1996-12-19T16:40:59.000000000,0,2359047937476779835,1, - 1996-12-19T16:41:57.000000000,0,2359047937476779835,1, - 1996-12-19T16:42:57.000000000,0,2359047937476779835,1,plant + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1, + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,cat + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1, + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1, + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,plant "###); } @@ -89,11 +89,11 @@ async fn test_u64_to_bool_get_static_key() { // Ideally we don't have to specify `as u64`. See https://github.com/kaskada-ai/kaskada/issues/534 insta::assert_snapshot!(QueryFixture::new("{ f1: get(4 as u64, Input.u64_to_bool) }").run_to_csv(&map_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,f1 - 1996-12-19T16:39:57.000000000,0,2359047937476779835,1, - 1996-12-19T16:40:57.000000000,0,2359047937476779835,1,false - 1996-12-19T16:40:59.000000000,0,2359047937476779835,1, - 1996-12-19T16:41:57.000000000,0,2359047937476779835,1, - 1996-12-19T16:42:57.000000000,0,2359047937476779835,1,true + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1, + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,false + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1, + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1, + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,true "###); } @@ -101,11 +101,11 @@ async fn test_u64_to_bool_get_static_key() { async fn test_bool_to_s_get_static_key() { insta::assert_snapshot!(QueryFixture::new("{ f1: get(true, Input.bool_to_s) }").run_to_csv(&map_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,f1 - 1996-12-19T16:39:57.000000000,0,2359047937476779835,1,dog - 1996-12-19T16:40:57.000000000,0,2359047937476779835,1,cat - 1996-12-19T16:40:59.000000000,0,2359047937476779835,1, - 1996-12-19T16:41:57.000000000,0,2359047937476779835,1,bird - 1996-12-19T16:42:57.000000000,0,2359047937476779835,1,plant + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,dog + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,cat + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1, + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,bird + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,plant "###); } @@ -115,11 +115,11 @@ async fn test_s_to_i64_get_with_first_last_agg() { // is applied over the _map_ value, which does not necessarily hold an "f2" key. 
insta::assert_snapshot!(QueryFixture::new("{ first_f2: Input.s_to_i64 | first() | get(\"f2\"), last_f2: Input.s_to_i64 | last() | get(\"f2\") }").run_to_csv(&map_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,first_f2,last_f2 - 1996-12-19T16:39:57.000000000,0,2359047937476779835,1,22,22 - 1996-12-19T16:40:57.000000000,0,2359047937476779835,1,22,10 - 1996-12-19T16:40:59.000000000,0,2359047937476779835,1,22,3 - 1996-12-19T16:41:57.000000000,0,2359047937476779835,1,22,13 - 1996-12-19T16:42:57.000000000,0,2359047937476779835,1,22, + 1996-12-19T16:39:57.000000000,0,18433805721903975440,1,22,22 + 1996-12-19T16:40:57.000000000,0,18433805721903975440,1,22,10 + 1996-12-19T16:40:59.000000000,0,18433805721903975440,1,22,3 + 1996-12-19T16:41:57.000000000,0,18433805721903975440,1,22,13 + 1996-12-19T16:42:57.000000000,0,18433805721903975440,1,22, "###); } @@ -127,11 +127,11 @@ async fn test_s_to_i64_get_with_first_last_agg() { async fn test_map_output_into_sum_aggregation() { insta::assert_snapshot!(QueryFixture::new("{ sum: Input.s_to_i64 | get(\"f1\") | sum(), value: Input.s_to_i64 | get(Input.s_to_i64_key) } | with_key(Input.s_to_i64_key)").run_to_csv(&map_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum,value - 1996-12-19T16:39:57.000000000,0,18146622110643880433,f1,0,0 - 1996-12-19T16:40:57.000000000,0,7541589802123724450,f2,1,10 - 1996-12-19T16:40:59.000000000,0,5533153676183607778,f3,6, - 1996-12-19T16:41:57.000000000,0,7541589802123724450,f2,6,13 - 1996-12-19T16:42:57.000000000,0,5533153676183607778,f3,21,11 + 1996-12-19T16:39:57.000000000,0,16639162690259065874,f1,0,0 + 1996-12-19T16:40:57.000000000,0,10324241474229410792,f2,1,10 + 1996-12-19T16:40:59.000000000,0,1256917131994086784,f3,6, + 1996-12-19T16:41:57.000000000,0,10324241474229410792,f2,6,13 + 1996-12-19T16:42:57.000000000,0,1256917131994086784,f3,21,11 "###); } @@ -158,7 +158,7 @@ async fn test_query_with_merge_and_map_output() { .unwrap(); assert_eq!( - "92C3C8B7E6AE6AF41266B63F3FBE11958DB5BFD23B58E891963F6287", + "E28AF590FCD81C1F47F79355DBCADDCBB76CF0B15091EB4D07F2E900", hash ); } @@ -173,7 +173,7 @@ async fn test_first_last_map() { .await .unwrap(); - let expected = "AB719CF6634779A5285D699A178AC69354696872E3733AA9388C9A6A"; + let expected = "D88A77A7F5172527642A4784A7482D17303FBE510435AA5E6BD3965C"; assert_eq!(hash, expected); } @@ -230,3 +230,35 @@ async fn test_incompatible_key_types() { - "" "###); } + +#[tokio::test] +async fn test_using_map_in_index_fails() { + insta::assert_yaml_snapshot!(QueryFixture::new("{ f1: Input.i64_to_i64 | index(0) }") + .run_to_csv(&map_data_fixture().await).await.unwrap_err(), @r###" + --- + code: Client specified an invalid argument + message: 1 errors in Fenl statements; see diagnostics + fenl_diagnostics: + - severity: error + code: E0010 + message: Invalid argument type(s) + formatted: + - "error[E0010]: Invalid argument type(s)" + - " --> Query:1:26" + - " |" + - "1 | { f1: Input.i64_to_i64 | index(0) }" + - " | ^^^^^ Invalid types for parameter 'list' in call to 'index'" + - " |" + - " --> internal:1:1" + - " |" + - 1 | $input + - " | ------ Actual type: map" + - " |" + - " --> built-in signature 'index(i: i64, list: list) -> T':1:29" + - " |" + - "1 | index(i: i64, list: list) -> T" + - " | ------- Expected type: list" + - "" + - "" + "###); +} diff --git a/crates/sparrow-main/tests/e2e/math_tests.rs b/crates/sparrow-main/tests/e2e/math_tests.rs index 0d8703cb6..f98fed8f8 100644 --- a/crates/sparrow-main/tests/e2e/math_tests.rs +++ 
b/crates/sparrow-main/tests/e2e/math_tests.rs @@ -7,12 +7,12 @@ use crate::QueryFixture; async fn test_i64_add() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, add: Numbers.m + Numbers.n}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,add - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,15 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,27 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,23 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,15 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,27 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,23 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -20,12 +20,12 @@ async fn test_i64_add() { async fn test_f64_add() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, add: Numbers.m + Numbers.n}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,add - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,15.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,28.2 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,23.8 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,15.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,28.2 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,23.8 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,, "###); } @@ -33,12 +33,12 @@ async fn test_f64_add() { async fn test_i64_add_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, add: Numbers.m + 1}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,add - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,6 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,25 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,18 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,13 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,6 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,25 + 
1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,18 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,13 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -46,12 +46,12 @@ async fn test_i64_add_literal() { async fn test_f64_add_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, add: Numbers.m + 1}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,add - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,6.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,25.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,18.6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,13.4 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,6.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,25.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,18.6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,13.4 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -59,12 +59,12 @@ async fn test_f64_add_literal() { async fn test_i64_sub() { insta::assert_snapshot!(QueryFixture::new("{ sub: Numbers.m - Numbers.n}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sub - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,-5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,21 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,11 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,-5 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,21 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,11 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -72,12 +72,12 @@ async fn test_i64_sub() { async fn test_f64_sub() { insta::assert_snapshot!(QueryFixture::new("{ sub: Numbers.m - Numbers.n}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sub - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,-4.8 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,20.400000000000002 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,11.400000000000002 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 
1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,-4.8 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,20.400000000000002 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,11.400000000000002 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -85,12 +85,12 @@ async fn test_f64_sub() { async fn test_i64_sub_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, sub: Numbers.m - 1}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,sub - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,4 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,23 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,16 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,11 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,4 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,23 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,16 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,11 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -98,12 +98,12 @@ async fn test_i64_sub_literal() { async fn test_f64_sub_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, sub: Numbers.m - 1}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,sub - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,4.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,23.3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,16.6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,11.4 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,4.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,23.3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,16.6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,11.4 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -111,12 +111,12 @@ async fn test_f64_sub_literal() { async fn test_i64_mul() { insta::assert_snapshot!(QueryFixture::new("{ mul: Numbers.m * Numbers.n}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,mul - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,50 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,72 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,102 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 
1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,50 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,72 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,102 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -124,12 +124,12 @@ async fn test_i64_mul() { async fn test_f64_mul() { insta::assert_snapshot!(QueryFixture::new("{ mul: Numbers.m * Numbers.n}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,mul - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,52.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,94.77 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,109.12000000000002 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,52.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,94.77 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,109.12000000000002 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -137,12 +137,12 @@ async fn test_f64_mul() { async fn test_i64_mul_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, mul: Numbers.m * 2}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,mul - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,48 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,34 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,24 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,48 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,34 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,24 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -150,12 +150,12 @@ async fn test_i64_mul_literal() { async fn test_f64_mul_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, mul: Numbers.m * 2}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,mul - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.4 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,48.6 - 
1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,35.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,24.8 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.4 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,48.6 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,35.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,24.8 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -163,12 +163,12 @@ async fn test_f64_mul_literal() { async fn test_i64_div() { insta::assert_snapshot!(QueryFixture::new("{ div: Numbers.m / Numbers.n}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,div - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,8 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,8 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -176,12 +176,12 @@ async fn test_i64_div() { async fn test_f64_div() { insta::assert_snapshot!(QueryFixture::new("{ div: Numbers.m / Numbers.n }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,div - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,0.52 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,6.230769230769231 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,2.838709677419355 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,0.52 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,6.230769230769231 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,2.838709677419355 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -189,12 +189,12 @@ async fn test_f64_div() { async fn test_i64_div_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, div: Numbers.m / 2}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,div - 
1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,12 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,8 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,6 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,12 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,8 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,6 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -202,12 +202,12 @@ async fn test_i64_div_literal() { async fn test_f64_div_literal() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, div: Numbers.m / 2}").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,div - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,2.6 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,12.15 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,8.8 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,6.2 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,2.6 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,12.15 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,8.8 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,6.2 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -215,12 +215,12 @@ async fn test_f64_div_literal() { async fn test_i64_neg() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, neg_m: -Numbers.m, neg_n: neg(Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,neg_m,neg_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,-5,-10 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,-24,-3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,-17,-6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,,-9 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,-12, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,-5,-10 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,-24,-3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,-17,-6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,,-9 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,-12, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -228,12 +228,12 @@ async fn 
test_i64_neg() { async fn test_f64_neg() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, neg_m: -Numbers.m, neg_n: neg(Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,neg_m,neg_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,-5.2,-10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,-24.3,-3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,-17.6,-6.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,,-9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,-12.4, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,-5.2,-10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,-24.3,-3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,-17.6,-6.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,,-9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,-12.4, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -241,12 +241,12 @@ async fn test_f64_neg() { async fn test_i64_ceil() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, ceil_m: ceil(Numbers.m), ceil_n: ceil(Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,ceil_m,ceil_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,5,10 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,24,3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,17,6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,,9 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,12, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,5,10 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,24,3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,17,6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,,9 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,12, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -254,12 +254,12 @@ async fn test_i64_ceil() { async fn test_f64_ceil() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, ceil_m: ceil(Numbers.m), ceil_n: ceil(Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,ceil_m,ceil_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,6.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,25.0,4.0 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,18.0,7.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,,10.0 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,13.0, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 
1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,6.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,25.0,4.0 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,18.0,7.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,,10.0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,13.0, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -267,12 +267,12 @@ async fn test_f64_ceil() { async fn test_i64_round() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, round_m: round(Numbers.m), round_n: round(Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,round_m,round_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,5,10 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,24,3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,17,6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,,9 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,12, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,5,10 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,24,3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,17,6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,,9 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,12, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -280,12 +280,12 @@ async fn test_i64_round() { async fn test_f64_round() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, round_m: round(Numbers.m), round_n: round(Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,round_m,round_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,5.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,24.0,4.0 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,18.0,6.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,,9.0 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,12.0, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,5.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,24.0,4.0 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,18.0,6.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,,9.0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,12.0, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -293,12 +293,12 @@ async fn test_f64_round() { async fn test_i64_floor() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, floor_m: floor(Numbers.m), floor_n: floor(Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" 
_time,_subsort,_key_hash,_key,m,n,floor_m,floor_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,5,10 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,24,3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,17,6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,,9 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,12, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,5,10 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,24,3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,17,6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,,9 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,12, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -306,12 +306,12 @@ async fn test_i64_floor() { async fn test_f64_floor() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, n: Numbers.n, floor_m: floor(Numbers.m), floor_n: floor(Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,floor_m,floor_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,5.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,24.0,3.0 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,17.0,6.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,,9.0 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,12.0, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,5.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,24.0,3.0 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,17.0,6.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,,9.0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,12.0, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -323,12 +323,12 @@ async fn test_i64_zip_min() { , zip_min_2_n: zip_min(2, Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,zip_min,zip_min_2_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,5,2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,3,2 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,6,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,5,2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,3,2 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,6,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,, + 
1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -340,12 +340,12 @@ async fn test_f64_zip_min() { , zip_min_2_n: zip_min(2, Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,zip_min,zip_min_2_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,5.2,2.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,3.9,2.0 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,6.2,2.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,,2.0 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,5.2,2.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,3.9,2.0 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,6.2,2.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,,2.0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -357,12 +357,12 @@ async fn test_i64_zip_max() { , zip_max_2_n: zip_max(2, Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,zip_max,zip_max_2_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,10,10 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,24,3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,17,6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,,9 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,10,10 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,24,3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,17,6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,,9 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -374,12 +374,12 @@ async fn test_f64_zip_max() { , zip_max_2_n: zip_max(2, Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,zip_max,zip_max_2_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,10.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,24.3,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,17.6,6.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,10.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,24.3,3.9 + 
1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,17.6,6.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -392,12 +392,12 @@ async fn test_i64_powf() { , pow_n_2: powf(Numbers.n, 2) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,pow_m_n,pow_2_m,pow_n_2 - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,9765625.0,32.0,100.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,13824.0,16777216.0,9.0 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,24137569.0,131072.0,36.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,,,81.0 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,,4096.0, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,9765625.0,32.0,100.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,13824.0,16777216.0,9.0 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,24137569.0,131072.0,36.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,,,81.0 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,,4096.0, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,,, "###); } @@ -410,12 +410,12 @@ async fn test_f64_powf() { , pow_n_2: powf(Numbers.n, 2) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,pow_m_n,pow_2_m,pow_n_2 - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,14455510.594905708,36.75834735990512,100.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,253433.76006548494,20655175.749880955,15.209999999999999 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,52744665.25889734,198668.0018056511,38.440000000000005 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,,,85.5625 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,,5404.7044025257765, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,14455510.594905708,36.75834735990512,100.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,253433.76006548494,20655175.749880955,15.209999999999999 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,52744665.25889734,198668.0018056511,38.440000000000005 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,,,85.5625 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,,5404.7044025257765, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,,, "###); } @@ -427,12 +427,12 @@ async fn test_i64_exp() { , exp_n: exp(Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,exp_m,exp_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,148.4131591025766,22026.465794806718 - 
1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,2.648912212984347e10,20.085536923187668 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,24154952.7535753,403.4287934927351 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,,8103.083927575384 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,162754.79141900392, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,148.4131591025766,22026.465794806718 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,2.648912212984347e10,20.085536923187668 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,24154952.7535753,403.4287934927351 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,,8103.083927575384 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,162754.79141900392, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -444,12 +444,12 @@ async fn test_f64_exp() { , exp_n: exp(Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,exp_m,exp_n - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,181.27224187515122,22026.465794806718 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,3.575657481192565e10,49.40244910553017 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,44013193.53483411,492.7490410932563 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,,10404.565716560723 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,242801.61749832364, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,181.27224187515122,22026.465794806718 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,3.575657481192565e10,49.40244910553017 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,44013193.53483411,492.7490410932563 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,,10404.565716560723 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,242801.61749832364, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,, "###); } @@ -463,12 +463,12 @@ async fn test_i64_clamp() { , clamp_m_n_max: clamp(Numbers.m, max = Numbers.n) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,clamp_m_2_n,clamp_m_2_10,clamp_m_n_min,clamp_m_n_max - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,10,5,5,10,5 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,3,3,10,24,3 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,6,6,10,17,6 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9,,,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,,12,10,12,12 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,10,5,5,10,5 + 
1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,3,3,10,24,3 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,6,6,10,17,6 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9,,,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,,12,10,12,12 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,,,, "###); } @@ -482,12 +482,12 @@ async fn test_f64_clamp() { , clamp_m_n_max: clamp(Numbers.m, max = Numbers.n) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,n,clamp_m_2_n,clamp_m_2_10,clamp_m_n_min,clamp_m_n_max - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,10.0,5.2,5.2,10.0,5.2 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,3.9,3.9,10.2,24.3,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,6.2,6.2,10.2,17.6,6.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,9.25,,,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,,12.4,10.2,12.4,12.4 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,,,,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,10.0,5.2,5.2,10.0,5.2 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,3.9,3.9,10.2,24.3,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,6.2,6.2,10.2,17.6,6.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,9.25,,,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,,12.4,10.2,12.4,12.4 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,,,,, "###); } @@ -495,12 +495,12 @@ async fn test_f64_clamp() { async fn test_f64_sqrt() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, sqrt: sqrt(Numbers.m) }").run_to_csv(&f64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,sqrt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5.2,2.280350850198276 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24.3,4.929503017546495 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17.6,4.1952353926806065 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.4,3.521363372331802 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5.2,2.280350850198276 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24.3,4.929503017546495 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17.6,4.1952353926806065 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.4,3.521363372331802 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } @@ -508,11 +508,11 @@ async fn test_f64_sqrt() { async fn test_i64_sqrt() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, sqrt: sqrt(Numbers.m) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,sqrt - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,2.23606797749979 - 
1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24,4.898979485566356 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,4.123105625617661 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,3.4641016151377544 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,2.23606797749979 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24,4.898979485566356 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,4.123105625617661 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,3.4641016151377544 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, "###); } diff --git a/crates/sparrow-main/tests/e2e/multiple_tables.rs b/crates/sparrow-main/tests/e2e/multiple_tables.rs index 9a1d0038d..aee311dbf 100644 --- a/crates/sparrow-main/tests/e2e/multiple_tables.rs +++ b/crates/sparrow-main/tests/e2e/multiple_tables.rs @@ -148,13 +148,13 @@ async fn test_identical_table_join() { .run_to_csv(&test_data).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t1_val,t2_val,sum - 2022-01-03T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,1,2 - 2022-01-03T01:39:57.000000000,9223372036854775808,11753611437813598533,B,10,10,20 - 2022-01-03T02:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2,4 - 2022-01-03T03:39:57.000000000,9223372036854775808,11753611437813598533,B,20,20,40 - 2022-01-03T04:39:57.000000000,9223372036854775808,9192031977313001967,C,100,100,200 - 2022-01-03T05:39:57.000000000,9223372036854775808,9192031977313001967,C,200,200,400 - 2022-01-03T06:39:57.000000000,9223372036854775808,3650215962958587783,A,3,3,6 + 2022-01-03T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,1,2 + 2022-01-03T01:39:57.000000000,9223372036854775808,2867199309159137213,B,10,10,20 + 2022-01-03T02:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2,4 + 2022-01-03T03:39:57.000000000,9223372036854775808,2867199309159137213,B,20,20,40 + 2022-01-03T04:39:57.000000000,9223372036854775808,2521269998124177631,C,100,100,200 + 2022-01-03T05:39:57.000000000,9223372036854775808,2521269998124177631,C,200,200,400 + 2022-01-03T06:39:57.000000000,9223372036854775808,12960666915911099378,A,3,3,6 "### ); } @@ -168,20 +168,20 @@ async fn test_tables_different_dates() { .run_to_csv(&purchase_tables_two_days().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t1_val,t2_val,sum - 2022-01-03T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,, - 2022-01-03T01:39:57.000000000,9223372036854775808,11753611437813598533,B,10,, - 2022-01-03T02:39:57.000000000,9223372036854775808,3650215962958587783,A,2,, - 2022-01-03T03:39:57.000000000,9223372036854775808,11753611437813598533,B,20,, - 2022-01-03T04:39:57.000000000,9223372036854775808,9192031977313001967,C,100,, - 2022-01-03T05:39:57.000000000,9223372036854775808,9192031977313001967,C,200,, - 2022-01-03T06:39:57.000000000,9223372036854775808,3650215962958587783,A,3,, - 2022-01-03T18:39:57.000000000,9223372036854775808,11753611437813598533,B,,5, - 2022-01-03T20:39:57.000000000,9223372036854775808,3650215962958587783,A,,4, - 2022-01-03T21:39:57.000000000,9223372036854775808,11753611437813598533,B,,15, - 
2022-01-03T22:39:57.000000000,9223372036854775808,3650215962958587783,A,,8, - 2022-01-03T23:39:57.000000000,9223372036854775808,9192031977313001967,C,,100, - 2022-01-04T00:39:57.000000000,9223372036854775808,9192031977313001967,C,,200, - 2022-01-04T01:39:57.000000000,9223372036854775808,11753611437813598533,B,,20, + 2022-01-03T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,, + 2022-01-03T01:39:57.000000000,9223372036854775808,2867199309159137213,B,10,, + 2022-01-03T02:39:57.000000000,9223372036854775808,12960666915911099378,A,2,, + 2022-01-03T03:39:57.000000000,9223372036854775808,2867199309159137213,B,20,, + 2022-01-03T04:39:57.000000000,9223372036854775808,2521269998124177631,C,100,, + 2022-01-03T05:39:57.000000000,9223372036854775808,2521269998124177631,C,200,, + 2022-01-03T06:39:57.000000000,9223372036854775808,12960666915911099378,A,3,, + 2022-01-03T18:39:57.000000000,9223372036854775808,2867199309159137213,B,,5, + 2022-01-03T20:39:57.000000000,9223372036854775808,12960666915911099378,A,,4, + 2022-01-03T21:39:57.000000000,9223372036854775808,2867199309159137213,B,,15, + 2022-01-03T22:39:57.000000000,9223372036854775808,12960666915911099378,A,,8, + 2022-01-03T23:39:57.000000000,9223372036854775808,2521269998124177631,C,,100, + 2022-01-04T00:39:57.000000000,9223372036854775808,2521269998124177631,C,,200, + 2022-01-04T01:39:57.000000000,9223372036854775808,2867199309159137213,B,,20, "###); } @@ -194,20 +194,20 @@ async fn test_tables_no_overlapping_keys() { .run_to_csv(&purchase_tables_different_keys().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t1_val,t2_val,sum - 2022-01-03T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,, - 2022-01-03T00:39:57.000000000,9223372036854775808,3721291248765124248,X,,1, - 2022-01-03T01:39:57.000000000,9223372036854775808,2267016691031950555,Y,,10, - 2022-01-03T01:39:57.000000000,9223372036854775808,11753611437813598533,B,10,, - 2022-01-03T02:39:57.000000000,9223372036854775808,3650215962958587783,A,2,, - 2022-01-03T02:39:57.000000000,9223372036854775808,3721291248765124248,X,,2, - 2022-01-03T03:39:57.000000000,9223372036854775808,2267016691031950555,Y,,20, - 2022-01-03T03:39:57.000000000,9223372036854775808,11753611437813598533,B,20,, - 2022-01-03T04:39:57.000000000,9223372036854775808,9192031977313001967,C,100,, - 2022-01-03T04:39:57.000000000,9223372036854775808,11266950221160544544,Z,,100, - 2022-01-03T05:39:57.000000000,9223372036854775808,9192031977313001967,C,200,, - 2022-01-03T05:39:57.000000000,9223372036854775808,11266950221160544544,Z,,200, - 2022-01-03T06:39:57.000000000,9223372036854775808,3650215962958587783,A,3,, - 2022-01-03T06:39:57.000000000,9223372036854775808,3721291248765124248,X,,3, + 2022-01-03T00:39:57.000000000,9223372036854775808,5844668342709334339,X,,1, + 2022-01-03T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,, + 2022-01-03T01:39:57.000000000,9223372036854775808,2867199309159137213,B,10,, + 2022-01-03T01:39:57.000000000,9223372036854775808,8493950773958210388,Y,,10, + 2022-01-03T02:39:57.000000000,9223372036854775808,5844668342709334339,X,,2, + 2022-01-03T02:39:57.000000000,9223372036854775808,12960666915911099378,A,2,, + 2022-01-03T03:39:57.000000000,9223372036854775808,2867199309159137213,B,20,, + 2022-01-03T03:39:57.000000000,9223372036854775808,8493950773958210388,Y,,20, + 2022-01-03T04:39:57.000000000,9223372036854775808,2521269998124177631,C,100,, + 2022-01-03T04:39:57.000000000,9223372036854775808,5050198837546418057,Z,,100, + 
2022-01-03T05:39:57.000000000,9223372036854775808,2521269998124177631,C,200,, + 2022-01-03T05:39:57.000000000,9223372036854775808,5050198837546418057,Z,,200, + 2022-01-03T06:39:57.000000000,9223372036854775808,5844668342709334339,X,,3, + 2022-01-03T06:39:57.000000000,9223372036854775808,12960666915911099378,A,3,, "###); } @@ -220,17 +220,17 @@ async fn test_tables_overlapping_keys() { .run_to_csv(&purchase_tables_overlapping_keys().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t1_val,t2_val,sum - 2022-01-03T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,, - 2022-01-03T00:39:57.000000000,9223372036854775808,3721291248765124248,X,,1, - 2022-01-03T01:39:57.000000000,9223372036854775808,11753611437813598533,B,10,10,20 - 2022-01-03T02:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2,4 - 2022-01-03T03:39:57.000000000,9223372036854775808,2267016691031950555,Y,,20, - 2022-01-03T03:39:57.000000000,9223372036854775808,11753611437813598533,B,20,, - 2022-01-03T04:39:57.000000000,9223372036854775808,9192031977313001967,C,100,100,200 - 2022-01-03T05:39:57.000000000,9223372036854775808,9192031977313001967,C,200,, - 2022-01-03T05:39:57.000000000,9223372036854775808,11266950221160544544,Z,,200, - 2022-01-03T06:39:57.000000000,9223372036854775808,3650215962958587783,A,3,, - 2022-01-03T06:39:57.000000000,9223372036854775808,3721291248765124248,X,,3, + 2022-01-03T00:39:57.000000000,9223372036854775808,5844668342709334339,X,,1, + 2022-01-03T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,, + 2022-01-03T01:39:57.000000000,9223372036854775808,2867199309159137213,B,10,10,20 + 2022-01-03T02:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2,4 + 2022-01-03T03:39:57.000000000,9223372036854775808,2867199309159137213,B,20,, + 2022-01-03T03:39:57.000000000,9223372036854775808,8493950773958210388,Y,,20, + 2022-01-03T04:39:57.000000000,9223372036854775808,2521269998124177631,C,100,100,200 + 2022-01-03T05:39:57.000000000,9223372036854775808,2521269998124177631,C,200,, + 2022-01-03T05:39:57.000000000,9223372036854775808,5050198837546418057,Z,,200, + 2022-01-03T06:39:57.000000000,9223372036854775808,5844668342709334339,X,,3, + 2022-01-03T06:39:57.000000000,9223372036854775808,12960666915911099378,A,3,, "###); } @@ -243,21 +243,21 @@ async fn test_tables_superset() { .run_to_csv(&purchase_tables_superset().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t1_val,t2_val,sum - 2022-01-03T00:37:57.000000000,9223372036854775808,11753611437813598533,B,,1, - 2022-01-03T00:38:57.000000000,9223372036854775808,3650215962958587783,A,,1, - 2022-01-03T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,1,2 - 2022-01-03T00:39:58.000000000,9223372036854775808,3650215962958587783,A,,666, - 2022-01-03T01:39:57.000000000,9223372036854775808,11753611437813598533,B,10,10,20 - 2022-01-03T01:40:57.000000000,9223372036854775808,11753611437813598533,B,,1000, - 2022-01-03T02:38:57.000000000,9223372036854775808,3650215962958587783,A,,777, - 2022-01-03T02:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2,4 - 2022-01-03T02:44:57.000000000,9223372036854775808,3650215962958587783,A,,888, - 2022-01-03T03:39:57.000000000,9223372036854775808,11753611437813598533,B,20,20,40 - 2022-01-03T04:39:57.000000000,9223372036854775808,9192031977313001967,C,100,100,200 - 2022-01-03T05:39:57.000000000,9223372036854775808,9192031977313001967,C,200,200,400 - 2022-01-03T06:39:57.000000000,9223372036854775808,3650215962958587783,A,3,3,6 - 
2022-01-03T06:43:57.000000000,9223372036854775808,11753611437813598533,B,,3, - 2022-01-03T06:45:57.000000000,9223372036854775808,9192031977313001967,C,,3, + 2022-01-03T00:37:57.000000000,9223372036854775808,2867199309159137213,B,,1, + 2022-01-03T00:38:57.000000000,9223372036854775808,12960666915911099378,A,,1, + 2022-01-03T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,1,2 + 2022-01-03T00:39:58.000000000,9223372036854775808,12960666915911099378,A,,666, + 2022-01-03T01:39:57.000000000,9223372036854775808,2867199309159137213,B,10,10,20 + 2022-01-03T01:40:57.000000000,9223372036854775808,2867199309159137213,B,,1000, + 2022-01-03T02:38:57.000000000,9223372036854775808,12960666915911099378,A,,777, + 2022-01-03T02:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2,4 + 2022-01-03T02:44:57.000000000,9223372036854775808,12960666915911099378,A,,888, + 2022-01-03T03:39:57.000000000,9223372036854775808,2867199309159137213,B,20,20,40 + 2022-01-03T04:39:57.000000000,9223372036854775808,2521269998124177631,C,100,100,200 + 2022-01-03T05:39:57.000000000,9223372036854775808,2521269998124177631,C,200,200,400 + 2022-01-03T06:39:57.000000000,9223372036854775808,12960666915911099378,A,3,3,6 + 2022-01-03T06:43:57.000000000,9223372036854775808,2867199309159137213,B,,3, + 2022-01-03T06:45:57.000000000,9223372036854775808,2521269998124177631,C,,3, "###); } @@ -275,13 +275,13 @@ async fn test_triple_add_same_table() { .run_to_csv(&test_data).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t1_val,t2_val,sum - 2022-01-03T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,1,3 - 2022-01-03T01:39:57.000000000,9223372036854775808,11753611437813598533,B,10,10,30 - 2022-01-03T02:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2,6 - 2022-01-03T03:39:57.000000000,9223372036854775808,11753611437813598533,B,20,20,60 - 2022-01-03T04:39:57.000000000,9223372036854775808,9192031977313001967,C,100,100,300 - 2022-01-03T05:39:57.000000000,9223372036854775808,9192031977313001967,C,200,200,600 - 2022-01-03T06:39:57.000000000,9223372036854775808,3650215962958587783,A,3,3,9 + 2022-01-03T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,1,3 + 2022-01-03T01:39:57.000000000,9223372036854775808,2867199309159137213,B,10,10,30 + 2022-01-03T02:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2,6 + 2022-01-03T03:39:57.000000000,9223372036854775808,2867199309159137213,B,20,20,60 + 2022-01-03T04:39:57.000000000,9223372036854775808,2521269998124177631,C,100,100,300 + 2022-01-03T05:39:57.000000000,9223372036854775808,2521269998124177631,C,200,200,600 + 2022-01-03T06:39:57.000000000,9223372036854775808,12960666915911099378,A,3,3,9 "###); } @@ -303,25 +303,25 @@ async fn test_triple_add_different_tables() { .run_to_csv(&test_data).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t1_val,t2_val,t3_val,sum - 2022-01-03T00:37:57.000000000,9223372036854775808,11753611437813598533,B,,1,, - 2022-01-03T00:38:57.000000000,9223372036854775808,3650215962958587783,A,,1,, - 2022-01-03T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,1,, - 2022-01-03T00:39:57.000000000,9223372036854775808,3721291248765124248,X,,,1, - 2022-01-03T00:39:58.000000000,9223372036854775808,3650215962958587783,A,,666,, - 2022-01-03T01:39:57.000000000,9223372036854775808,11753611437813598533,B,10,10,10,30 - 2022-01-03T01:40:57.000000000,9223372036854775808,11753611437813598533,B,,1000,, - 2022-01-03T02:38:57.000000000,9223372036854775808,3650215962958587783,A,,777,, - 
2022-01-03T02:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2,2,6 - 2022-01-03T02:44:57.000000000,9223372036854775808,3650215962958587783,A,,888,, - 2022-01-03T03:39:57.000000000,9223372036854775808,2267016691031950555,Y,,,20, - 2022-01-03T03:39:57.000000000,9223372036854775808,11753611437813598533,B,20,20,, - 2022-01-03T04:39:57.000000000,9223372036854775808,9192031977313001967,C,100,100,100,300 - 2022-01-03T05:39:57.000000000,9223372036854775808,9192031977313001967,C,200,200,, - 2022-01-03T05:39:57.000000000,9223372036854775808,11266950221160544544,Z,,,200, - 2022-01-03T06:39:57.000000000,9223372036854775808,3650215962958587783,A,3,3,, - 2022-01-03T06:39:57.000000000,9223372036854775808,3721291248765124248,X,,,3, - 2022-01-03T06:43:57.000000000,9223372036854775808,11753611437813598533,B,,3,, - 2022-01-03T06:45:57.000000000,9223372036854775808,9192031977313001967,C,,3,, + 2022-01-03T00:37:57.000000000,9223372036854775808,2867199309159137213,B,,1,, + 2022-01-03T00:38:57.000000000,9223372036854775808,12960666915911099378,A,,1,, + 2022-01-03T00:39:57.000000000,9223372036854775808,5844668342709334339,X,,,1, + 2022-01-03T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,1,, + 2022-01-03T00:39:58.000000000,9223372036854775808,12960666915911099378,A,,666,, + 2022-01-03T01:39:57.000000000,9223372036854775808,2867199309159137213,B,10,10,10,30 + 2022-01-03T01:40:57.000000000,9223372036854775808,2867199309159137213,B,,1000,, + 2022-01-03T02:38:57.000000000,9223372036854775808,12960666915911099378,A,,777,, + 2022-01-03T02:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2,2,6 + 2022-01-03T02:44:57.000000000,9223372036854775808,12960666915911099378,A,,888,, + 2022-01-03T03:39:57.000000000,9223372036854775808,2867199309159137213,B,20,20,, + 2022-01-03T03:39:57.000000000,9223372036854775808,8493950773958210388,Y,,,20, + 2022-01-03T04:39:57.000000000,9223372036854775808,2521269998124177631,C,100,100,100,300 + 2022-01-03T05:39:57.000000000,9223372036854775808,2521269998124177631,C,200,200,, + 2022-01-03T05:39:57.000000000,9223372036854775808,5050198837546418057,Z,,,200, + 2022-01-03T06:39:57.000000000,9223372036854775808,5844668342709334339,X,,,3, + 2022-01-03T06:39:57.000000000,9223372036854775808,12960666915911099378,A,3,3,, + 2022-01-03T06:43:57.000000000,9223372036854775808,2867199309159137213,B,,3,, + 2022-01-03T06:45:57.000000000,9223372036854775808,2521269998124177631,C,,3,, "###); } @@ -343,24 +343,24 @@ async fn test_3_tables_with_3_additions_with_1_common_operand() { .run_to_csv(&test_data).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t1_val,t2_val,t3_val,sum - 2022-01-03T00:37:57.000000000,9223372036854775808,11753611437813598533,B,,1,, - 2022-01-03T00:38:57.000000000,9223372036854775808,3650215962958587783,A,,1,, - 2022-01-03T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,1,, - 2022-01-03T00:39:57.000000000,9223372036854775808,3721291248765124248,X,,,1, - 2022-01-03T00:39:58.000000000,9223372036854775808,3650215962958587783,A,,666,, - 2022-01-03T01:39:57.000000000,9223372036854775808,11753611437813598533,B,10,10,10,40 - 2022-01-03T01:40:57.000000000,9223372036854775808,11753611437813598533,B,,1000,, - 2022-01-03T02:38:57.000000000,9223372036854775808,3650215962958587783,A,,777,, - 2022-01-03T02:39:57.000000000,9223372036854775808,3650215962958587783,A,2,2,2,8 - 2022-01-03T02:44:57.000000000,9223372036854775808,3650215962958587783,A,,888,, - 2022-01-03T03:39:57.000000000,9223372036854775808,2267016691031950555,Y,,,20, - 
2022-01-03T03:39:57.000000000,9223372036854775808,11753611437813598533,B,20,20,, - 2022-01-03T04:39:57.000000000,9223372036854775808,9192031977313001967,C,100,100,100,400 - 2022-01-03T05:39:57.000000000,9223372036854775808,9192031977313001967,C,200,200,, - 2022-01-03T05:39:57.000000000,9223372036854775808,11266950221160544544,Z,,,200, - 2022-01-03T06:39:57.000000000,9223372036854775808,3650215962958587783,A,3,3,, - 2022-01-03T06:39:57.000000000,9223372036854775808,3721291248765124248,X,,,3, - 2022-01-03T06:43:57.000000000,9223372036854775808,11753611437813598533,B,,3,, - 2022-01-03T06:45:57.000000000,9223372036854775808,9192031977313001967,C,,3,, + 2022-01-03T00:37:57.000000000,9223372036854775808,2867199309159137213,B,,1,, + 2022-01-03T00:38:57.000000000,9223372036854775808,12960666915911099378,A,,1,, + 2022-01-03T00:39:57.000000000,9223372036854775808,5844668342709334339,X,,,1, + 2022-01-03T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,1,, + 2022-01-03T00:39:58.000000000,9223372036854775808,12960666915911099378,A,,666,, + 2022-01-03T01:39:57.000000000,9223372036854775808,2867199309159137213,B,10,10,10,40 + 2022-01-03T01:40:57.000000000,9223372036854775808,2867199309159137213,B,,1000,, + 2022-01-03T02:38:57.000000000,9223372036854775808,12960666915911099378,A,,777,, + 2022-01-03T02:39:57.000000000,9223372036854775808,12960666915911099378,A,2,2,2,8 + 2022-01-03T02:44:57.000000000,9223372036854775808,12960666915911099378,A,,888,, + 2022-01-03T03:39:57.000000000,9223372036854775808,2867199309159137213,B,20,20,, + 2022-01-03T03:39:57.000000000,9223372036854775808,8493950773958210388,Y,,,20, + 2022-01-03T04:39:57.000000000,9223372036854775808,2521269998124177631,C,100,100,100,400 + 2022-01-03T05:39:57.000000000,9223372036854775808,2521269998124177631,C,200,200,, + 2022-01-03T05:39:57.000000000,9223372036854775808,5050198837546418057,Z,,,200, + 2022-01-03T06:39:57.000000000,9223372036854775808,5844668342709334339,X,,,3, + 2022-01-03T06:39:57.000000000,9223372036854775808,12960666915911099378,A,3,3,, + 2022-01-03T06:43:57.000000000,9223372036854775808,2867199309159137213,B,,3,, + 2022-01-03T06:45:57.000000000,9223372036854775808,2521269998124177631,C,,3,, "###); } diff --git a/crates/sparrow-main/tests/e2e/notebooks/documentation_code_tests.rs b/crates/sparrow-main/tests/e2e/notebooks/documentation_code_tests.rs index b40127be1..58d28e51a 100644 --- a/crates/sparrow-main/tests/e2e/notebooks/documentation_code_tests.rs +++ b/crates/sparrow-main/tests/e2e/notebooks/documentation_code_tests.rs @@ -77,15 +77,15 @@ async fn test_feature_query() { .await.unwrap(), @r###" _time,_subsort,_key_hash,_key,target,entity,purchase_total,mean_purchase - 2020-01-31T00:00:00.000000000,0,12403606648443509540,cb_001,,cb_001,9,9.0 - 2020-01-31T00:00:00.000000000,1,3378313965393548399,kk_001,,kk_001,3,3.0 - 2020-02-01T00:00:00.000000000,2,10125507257854989870,cb_002,,cb_002,2,5.5 - 2020-02-01T00:00:00.000000000,3,12726274317228806426,kk_002,,kk_002,5,4.0 - 2020-02-02T00:00:00.000000000,4,3211606271575657135,cb_003,,cb_003,4,5.0 - 2020-02-02T00:00:00.000000000,5,8266159855839342533,kk_003,,kk_003,12,6.666666666666666 - 2020-02-03T00:00:00.000000000,6,10823917382312772685,cb_004,1,cb_004,5000,1255.0 - 2020-02-03T00:00:00.000000000,7,15196731045994046513,cb_005,,cb_005,3,4.5 - 2020-02-04T00:00:00.000000000,8,14310739973228482455,cb_006,,cb_006,5,4.6 - 2020-02-04T00:00:00.000000000,9,13038643822299281194,kk_004,,kk_004,9,1005.8 + 2020-01-31T00:00:00.000000000,0,9739869918241705874,cb_001,,cb_001,9,9.0 
+ 2020-01-31T00:00:00.000000000,1,8162965343037296454,kk_001,,kk_001,3,3.0 + 2020-02-01T00:00:00.000000000,2,16771742781439526807,cb_002,,cb_002,2,5.5 + 2020-02-01T00:00:00.000000000,3,16781727739679749928,kk_002,,kk_002,5,4.0 + 2020-02-02T00:00:00.000000000,4,8818192394727837511,cb_003,,cb_003,4,5.0 + 2020-02-02T00:00:00.000000000,5,11581512207510796157,kk_003,,kk_003,12,6.666666666666666 + 2020-02-03T00:00:00.000000000,6,2404661809800283400,cb_004,1,cb_004,5000,1255.0 + 2020-02-03T00:00:00.000000000,7,15509676972665496364,cb_005,,cb_005,3,4.5 + 2020-02-04T00:00:00.000000000,8,5969313252504815416,cb_006,,cb_006,5,4.6 + 2020-02-04T00:00:00.000000000,9,9220563600602609354,kk_004,,kk_004,9,1005.8 "###); } diff --git a/crates/sparrow-main/tests/e2e/notebooks/event_data_tests.rs b/crates/sparrow-main/tests/e2e/notebooks/event_data_tests.rs index 9b0f0b185..a21919171 100644 --- a/crates/sparrow-main/tests/e2e/notebooks/event_data_tests.rs +++ b/crates/sparrow-main/tests/e2e/notebooks/event_data_tests.rs @@ -38,7 +38,7 @@ async fn test_initial_query() { // Regression test for a take on a null array assert_eq!(no_simplifier, simplifier); - insta::assert_snapshot!(no_simplifier, @"05C46CFB8589A52711CBB3E8A81CBC4C0F5D69C6C72E7B4C6B738CBC"); + insta::assert_snapshot!(no_simplifier, @"D50CA894B17571359A5E0F4527E3E26A2A74CC48CAC97065E9331E72"); } const EVENTS: &str = indoc! {" @@ -111,52 +111,52 @@ async fn test_events() { insta::assert_snapshot!(QueryFixture::new(EVENTS).run_to_csv(&sample_event_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,id,count_today,locale_score_sliding - 2020-10-27T16:03:28.331000000,9223372036854775808,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,1,1 - 2020-10-27T17:24:17.956000000,9223372036854775811,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,2,2 - 2020-10-27T17:24:17.967000000,9223372036854775813,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,3,3 - 2020-10-27T17:24:17.967000000,9223372036854775814,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,4,4 - 2020-10-27T17:25:45.242000000,9223372036854775829,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,5,5 - 2020-10-27T17:25:45.248000000,9223372036854775830,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,6,6 - 2020-10-27T17:25:53.717000000,9223372036854775839,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,7,7 - 2020-10-27T17:26:25.213000000,9223372036854775854,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,8,8 - 2020-10-27T17:26:35.816000000,9223372036854775855,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,9,9 - 2020-10-27T17:26:49.665000000,9223372036854775856,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,10,10 - 2020-10-27T17:29:35.525000000,9223372036854775857,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,11,11 - 2020-10-27T17:30:21.233000000,9223372036854775859,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,12,12 - 
- 2020-10-27T17:32:36.646000000,9223372036854775860,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,13,13
- 2020-10-27T17:33:55.353000000,9223372036854775867,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,14,14
- 2020-10-27T17:34:03.546000000,9223372036854775868,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,15,15
- 2020-10-27T17:35:39.310000000,9223372036854775869,7966499180359851935,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,1,1
- 2020-10-27T17:35:39.311000000,9223372036854775870,7966499180359851935,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,2,2
- 2020-10-27T17:35:47.195000000,9223372036854775881,7966499180359851935,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,3,3
- 2020-10-27T17:35:47.201000000,9223372036854775882,7966499180359851935,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,4,4
- 2020-10-27T17:36:30.940000000,9223372036854775897,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,16,16
- 2020-10-27T17:36:31.894000000,9223372036854775898,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,1,1
- 2020-10-27T17:36:31.894000000,9223372036854775899,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,2,2
- 2020-10-27T17:36:31.895000000,9223372036854775900,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,3,3
- 2020-10-27T17:36:35.873000000,9223372036854775909,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,4,4
- 2020-10-27T17:36:36.031000000,9223372036854775918,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,5,5
- 2020-10-27T17:36:37.360000000,9223372036854775919,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,6,6
- 2020-10-27T17:36:37.453000000,9223372036854775920,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,7,7
- 2020-10-27T17:36:38.193000000,9223372036854775921,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,8,8
- 2020-10-27T17:36:38.259000000,9223372036854775922,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,9,9
- 2020-10-27T17:36:38.923000000,9223372036854775923,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,10,10
- 2020-10-27T17:36:39.012000000,9223372036854775924,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,11,11
- 2020-10-27T17:36:41.397000000,9223372036854775925,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,12,12
- 2020-10-27T17:36:41.916000000,9223372036854775926,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,13,13
- 2020-10-27T17:36:41.980000000,9223372036854775927,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,14,14
- 2020-10-27T17:36:42.939000000,9223372036854775928,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,15,15
- 2020-10-27T17:36:43.652000000,9223372036854775929,7966499180359851935,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,5,5
- 2020-10-27T17:36:43.862000000,9223372036854775930,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,16,16
- 2020-10-27T17:36:43.927000000,9223372036854775931,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,17,17
- 2020-10-27T17:36:47.068000000,9223372036854775934,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,18,18
- 2020-10-27T17:36:48.517000000,9223372036854775935,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,19,19
- 2020-10-27T17:36:52.086000000,9223372036854775938,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,20,20
- 2020-10-27T17:36:52.145000000,9223372036854775939,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,21,21
- 2020-10-27T17:36:52.548000000,9223372036854775940,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,22,22
- 2020-10-27T17:36:52.629000000,9223372036854775941,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,23,23
- 2020-10-27T17:36:57.093000000,9223372036854775942,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,17,17
- 2020-10-27T17:36:57.104000000,9223372036854775943,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,18,18
+ 2020-10-27T16:03:28.331000000,9223372036854775808,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,1,1
+ 2020-10-27T17:24:17.956000000,9223372036854775811,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,2,2
+ 2020-10-27T17:24:17.967000000,9223372036854775813,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,3,3
+ 2020-10-27T17:24:17.967000000,9223372036854775814,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,4,4
+ 2020-10-27T17:25:45.242000000,9223372036854775829,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,5,5
+ 2020-10-27T17:25:45.248000000,9223372036854775830,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,6,6
+ 2020-10-27T17:25:53.717000000,9223372036854775839,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,7,7
+ 2020-10-27T17:26:25.213000000,9223372036854775854,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,8,8
+ 2020-10-27T17:26:35.816000000,9223372036854775855,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,9,9
+ 2020-10-27T17:26:49.665000000,9223372036854775856,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,10,10
+ 2020-10-27T17:29:35.525000000,9223372036854775857,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,11,11
+ 2020-10-27T17:30:21.233000000,9223372036854775859,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,12,12
+ 2020-10-27T17:32:36.646000000,9223372036854775860,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,13,13
+ 2020-10-27T17:33:55.353000000,9223372036854775867,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,14,14
+ 2020-10-27T17:34:03.546000000,9223372036854775868,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,15,15
+ 2020-10-27T17:35:39.310000000,9223372036854775869,1279197888909376308,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,1,1
+ 2020-10-27T17:35:39.311000000,9223372036854775870,1279197888909376308,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,2,2
+ 2020-10-27T17:35:47.195000000,9223372036854775881,1279197888909376308,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,3,3
+ 2020-10-27T17:35:47.201000000,9223372036854775882,1279197888909376308,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,4,4
+ 2020-10-27T17:36:30.940000000,9223372036854775897,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,16,16
+ 2020-10-27T17:36:31.894000000,9223372036854775898,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,1,1
+ 2020-10-27T17:36:31.894000000,9223372036854775899,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,2,2
+ 2020-10-27T17:36:31.895000000,9223372036854775900,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,3,3
+ 2020-10-27T17:36:35.873000000,9223372036854775909,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,4,4
+ 2020-10-27T17:36:36.031000000,9223372036854775918,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,5,5
+ 2020-10-27T17:36:37.360000000,9223372036854775919,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,6,6
+ 2020-10-27T17:36:37.453000000,9223372036854775920,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,7,7
+ 2020-10-27T17:36:38.193000000,9223372036854775921,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,8,8
+ 2020-10-27T17:36:38.259000000,9223372036854775922,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,9,9
+ 2020-10-27T17:36:38.923000000,9223372036854775923,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,10,10
+ 2020-10-27T17:36:39.012000000,9223372036854775924,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,11,11
+ 2020-10-27T17:36:41.397000000,9223372036854775925,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,12,12
+ 2020-10-27T17:36:41.916000000,9223372036854775926,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,13,13
+ 2020-10-27T17:36:41.980000000,9223372036854775927,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,14,14
+ 2020-10-27T17:36:42.939000000,9223372036854775928,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,15,15
+ 2020-10-27T17:36:43.652000000,9223372036854775929,1279197888909376308,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,5,5
+ 2020-10-27T17:36:43.862000000,9223372036854775930,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,16,16
+ 2020-10-27T17:36:43.927000000,9223372036854775931,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,17,17
+ 2020-10-27T17:36:47.068000000,9223372036854775934,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,18,18
+ 2020-10-27T17:36:48.517000000,9223372036854775935,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,19,19
+ 2020-10-27T17:36:52.086000000,9223372036854775938,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,20,20
+ 2020-10-27T17:36:52.145000000,9223372036854775939,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,21,21
+ 2020-10-27T17:36:52.548000000,9223372036854775940,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,22,22
+ 2020-10-27T17:36:52.629000000,9223372036854775941,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,23,23
+ 2020-10-27T17:36:57.093000000,9223372036854775942,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,17,17
+ 2020-10-27T17:36:57.104000000,9223372036854775943,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,18,18
 "###);
 }
@@ -165,9 +165,9 @@ async fn test_events_with_final_results() {
 insta::assert_snapshot!(QueryFixture::new(EVENTS).with_final_results().run_to_csv(&sample_event_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,id,count_today,locale_score_sliding
- 2020-10-27T17:36:57.104000001,18446744073709551615,7966499180359851935,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,5,5
- 2020-10-27T17:36:57.104000001,18446744073709551615,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,23,23
- 2020-10-27T17:36:57.104000001,18446744073709551615,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,18,18
+ 2020-10-27T17:36:57.104000001,18446744073709551615,1279197888909376308,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,5,5
+ 2020-10-27T17:36:57.104000001,18446744073709551615,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,18,18
+ 2020-10-27T17:36:57.104000001,18446744073709551615,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,23,23
 "###);
 }
@@ -176,15 +176,15 @@ async fn test_page_event() {
 insta::assert_snapshot!(QueryFixture::new(PAGE_EVENTS).run_to_csv(&sample_event_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,id,locale_score_sliding
- 2020-10-27T16:03:28.331000000,9223372036854775808,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,2.0
- 2020-10-27T17:24:17.967000000,9223372036854775813,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,1.5
- 2020-10-27T17:25:45.242000000,9223372036854775829,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,1.0
- 2020-10-27T17:25:53.717000000,9223372036854775839,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,0.75
- 2020-10-27T17:35:39.310000000,9223372036854775869,7966499180359851935,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,0.0
- 2020-10-27T17:35:47.195000000,9223372036854775881,7966499180359851935,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,0.0
- 2020-10-27T17:36:31.894000000,9223372036854775898,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0.0
- 2020-10-27T17:36:31.894000000,9223372036854775899,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0.0
- 2020-10-27T17:36:57.093000000,9223372036854775942,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,0.8
+ 2020-10-27T16:03:28.331000000,9223372036854775808,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,2.0
+ 2020-10-27T17:24:17.967000000,9223372036854775813,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,1.5
+ 2020-10-27T17:25:45.242000000,9223372036854775829,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,1.0
+ 2020-10-27T17:25:53.717000000,9223372036854775839,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,0.75
+ 2020-10-27T17:35:39.310000000,9223372036854775869,1279197888909376308,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,0.0
+ 2020-10-27T17:35:47.195000000,9223372036854775881,1279197888909376308,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,0.0
+ 2020-10-27T17:36:31.894000000,9223372036854775898,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0.0
+ 2020-10-27T17:36:31.894000000,9223372036854775899,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0.0
+ 2020-10-27T17:36:57.093000000,9223372036854775942,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,0.8
 "###);
 }
@@ -193,9 +193,9 @@ async fn test_page_event_with_final_results() {
 insta::assert_snapshot!(QueryFixture::new(PAGE_EVENTS).with_final_results().run_to_csv(&sample_event_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,id,locale_score_sliding
- 2020-10-27T17:36:57.104000001,18446744073709551615,7966499180359851935,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,0.0
- 2020-10-27T17:36:57.104000001,18446744073709551615,11177077458995142934,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0.0
- 2020-10-27T17:36:57.104000001,18446744073709551615,12546332615935823199,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,0.8
+ 2020-10-27T17:36:57.104000001,18446744073709551615,1279197888909376308,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,02b9152e-3b25-45cc-b7bb-0d8f98bf7524,0.0
+ 2020-10-27T17:36:57.104000001,18446744073709551615,17552223493047837804,8a16beda-c07a-4625-a805-2d28f5934107,8a16beda-c07a-4625-a805-2d28f5934107,0.8
+ 2020-10-27T17:36:57.104000001,18446744073709551615,17703029354039803950,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0b00083c-5c1e-47f5-abba-f89b12ae3cf4,0.0
 "###);
 }
@@ -248,7 +248,7 @@ async fn test_multiple_distinct_partitions() {
 insta::assert_snapshot!(
 hash,
- @"669F56697B38FC882FE63E5FEEDDBA34D92FD504732951C4CFD95D4B"
+ @"F79E4F750EFA4A66D751CE4EE52A7B460FC6B9C4C8D8E09B90DCF464"
 );
 }
diff --git a/crates/sparrow-main/tests/e2e/notebooks/gaming_tests.rs b/crates/sparrow-main/tests/e2e/notebooks/gaming_tests.rs
index a5e7d26db..31b2523c4 100644
--- a/crates/sparrow-main/tests/e2e/notebooks/gaming_tests.rs
+++ b/crates/sparrow-main/tests/e2e/notebooks/gaming_tests.rs
@@ -53,7 +53,7 @@ async fn test_gaming_events_to_csv() {
 insta::assert_snapshot!(QueryFixture::new(GAMING_EVENTS).with_dump_dot("gaming").run_to_csv(&gaming_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,loss_duration
- 2022-01-01T03:56:00.000000000,0,17054345325612802246,Bob,11
- 2022-01-01T08:45:00.000000000,0,5902814233694669492,Alice,7
+ 2022-01-01T03:56:00.000000000,0,7772866443370847918,Bob,11
+ 2022-01-01T08:45:00.000000000,0,17853343368786040891,Alice,7
 "###);
 }
diff --git a/crates/sparrow-main/tests/e2e/notebooks/sample_tests.rs b/crates/sparrow-main/tests/e2e/notebooks/sample_tests.rs
index fa30782ef..05d643d12 100644
--- a/crates/sparrow-main/tests/e2e/notebooks/sample_tests.rs
+++ b/crates/sparrow-main/tests/e2e/notebooks/sample_tests.rs
@@ -57,14 +57,14 @@ async fn test_sample_events_to_csv() {
 insta::assert_snapshot!(QueryFixture::new(SAMPLE_EVENTS).run_to_csv(&sample_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,timestamp_continuous,username_continuous,count_hourly,event_count_total,event_time_not_continuous,event_username_not_continuous
- 2022-01-01T12:00:00.000000000,18446744073709551615,9458906648771141622,ada,2022-01-01T12:00:00.000000000,ada,1,1,,
- 2022-01-01T12:00:00.000000000,18446744073709551615,13887258915425630658,brian,2022-01-01T12:00:00.000000000,brian,1,1,,
- 2022-01-01T13:00:00.000000000,18446744073709551615,9458906648771141622,ada,2022-01-01T12:00:00.000000000,ada,0,1,,
- 2022-01-01T13:00:00.000000000,18446744073709551615,13887258915425630658,brian,2022-01-01T12:00:00.000000000,brian,0,1,,
- 2022-01-01T14:00:00.000000000,18446744073709551615,9458906648771141622,ada,2022-01-01T14:00:00.000000000,ada,3,4,,
- 2022-01-01T14:00:00.000000000,18446744073709551615,13887258915425630658,brian,2022-01-01T13:40:00.000000000,brian,2,3,,
- 2022-01-01T15:00:00.000000000,18446744073709551615,9458906648771141622,ada,2022-01-01T14:00:00.000000000,ada,0,4,,
- 2022-01-01T15:00:00.000000000,18446744073709551615,13887258915425630658,brian,2022-01-01T15:00:00.000000000,brian,1,4,,
+ 2022-01-01T12:00:00.000000000,18446744073709551615,763888930855861605,brian,2022-01-01T12:00:00.000000000,brian,1,1,,
+ 2022-01-01T12:00:00.000000000,18446744073709551615,10511513854575835016,ada,2022-01-01T12:00:00.000000000,ada,1,1,,
+ 2022-01-01T13:00:00.000000000,18446744073709551615,763888930855861605,brian,2022-01-01T12:00:00.000000000,brian,0,1,,
+ 2022-01-01T13:00:00.000000000,18446744073709551615,10511513854575835016,ada,2022-01-01T12:00:00.000000000,ada,0,1,,
+ 2022-01-01T14:00:00.000000000,18446744073709551615,763888930855861605,brian,2022-01-01T13:40:00.000000000,brian,2,3,,
+ 2022-01-01T14:00:00.000000000,18446744073709551615,10511513854575835016,ada,2022-01-01T14:00:00.000000000,ada,3,4,,
+ 2022-01-01T15:00:00.000000000,18446744073709551615,763888930855861605,brian,2022-01-01T15:00:00.000000000,brian,1,4,,
+ 2022-01-01T15:00:00.000000000,18446744073709551615,10511513854575835016,ada,2022-01-01T14:00:00.000000000,ada,0,4,,
 "###);
 }
@@ -81,5 +81,5 @@ async fn test_sample_events_to_parquet() {
 .unwrap();
 assert_eq!(no_simplifier, simplifier);
- insta::assert_snapshot!(simplifier, @"C4EDACE415EE29FDD330508491AF9D6248905E60D0CC722751F9117D")
+ insta::assert_snapshot!(simplifier, @"D2493BFB6F4BA9C5F926AA4F51931952BE705773CEA8C5F3A8256121")
 }
diff --git a/crates/sparrow-main/tests/e2e/parquet_tests.rs b/crates/sparrow-main/tests/e2e/parquet_tests.rs
index 3aea73d04..8e9308718 100644
--- a/crates/sparrow-main/tests/e2e/parquet_tests.rs
+++ b/crates/sparrow-main/tests/e2e/parquet_tests.rs
@@ -112,10 +112,10 @@ async fn test_timestamp_microseconds() {
 insta::assert_snapshot!(QueryFixture::new("Events").run_to_csv(&data_fixture).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,user_id
- 1970-01-01T00:00:00.001000000,17227329910109684520,7636293598395510443,a,1970-01-01T00:00:00.001000000,a
- 1970-01-01T00:00:00.001001000,17227329910109684521,2637710838665036908,b,1970-01-01T00:00:00.001001000,b
- 1970-01-01T00:00:00.001002000,17227329910109684522,5899024403724905519,c,1970-01-01T00:00:00.001002000,c
- 1970-01-01T00:00:00.001003000,17227329910109684523,2459037462255564612,d,1970-01-01T00:00:00.001003000,d
+ 1970-01-01T00:00:00.001000000,17227329910109684520,13074916891489937275,a,1970-01-01T00:00:00.001000000,a
+ 1970-01-01T00:00:00.001001000,17227329910109684521,12352002978215245678,b,1970-01-01T00:00:00.001001000,b
+ 1970-01-01T00:00:00.001002000,17227329910109684522,298518813902531243,c,1970-01-01T00:00:00.001002000,c
+ 1970-01-01T00:00:00.001003000,17227329910109684523,5884497185123646446,d,1970-01-01T00:00:00.001003000,d
 "###);
 }
@@ -146,20 +146,20 @@ async fn test_multi_file_purchases() {
 }").run_to_csv(&data_fixture).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,entity,max_amount
- 2020-01-01T00:00:00.000000000,9223372036854775808,10966214875107816766,karen,2020-01-01T00:00:00.000000000,karen,9
- 2020-01-01T00:00:00.000000000,9223372036854775809,15119067519137142314,patrick,2020-01-01T00:00:00.000000000,patrick,3
- 2020-01-02T00:00:00.000000000,9223372036854775810,10966214875107816766,karen,2020-01-02T00:00:00.000000000,karen,9
- 2020-01-02T00:00:00.000000000,9223372036854775811,15119067519137142314,patrick,2020-01-02T00:00:00.000000000,patrick,5
- 2020-01-03T00:00:00.000000000,9223372036854775812,10966214875107816766,karen,2020-01-03T00:00:00.000000000,karen,9
- 2020-01-03T00:00:00.000000000,9223372036854775813,15119067519137142314,patrick,2020-01-03T00:00:00.000000000,patrick,12
- 2020-01-04T00:00:00.000000000,9223372036854775814,15119067519137142314,patrick,2020-01-04T00:00:00.000000000,patrick,5000
- 2020-01-04T00:00:00.000000000,9223372036854775815,10966214875107816766,karen,2020-01-04T00:00:00.000000000,karen,9
- 2020-01-05T00:00:00.000000000,9223372036854775816,10966214875107816766,karen,2020-01-05T00:00:00.000000000,karen,9
- 2020-01-05T00:00:00.000000000,9223372036854775817,15119067519137142314,patrick,2020-01-05T00:00:00.000000000,patrick,5000
- 2020-01-06T00:00:00.000000000,9223372036854775808,15119067519137142314,patrick,2020-01-06T00:00:00.000000000,patrick,5000
- 2020-01-06T00:00:00.000000000,9223372036854775809,14116511794099673386,spongebob,2020-01-06T00:00:00.000000000,spongebob,7
- 2020-01-07T00:00:00.000000000,9223372036854775810,14116511794099673386,spongebob,2020-01-07T00:00:00.000000000,spongebob,34
- 2020-01-08T00:00:00.000000000,9223372036854775811,10966214875107816766,karen,2020-01-08T00:00:00.000000000,karen,9
- 2020-01-08T00:00:00.000000000,9223372036854775812,15119067519137142314,patrick,2020-01-08T00:00:00.000000000,patrick,5000
+ 2020-01-01T00:00:00.000000000,9223372036854775808,4674756217206002200,karen,2020-01-01T00:00:00.000000000,karen,9
+ 2020-01-01T00:00:00.000000000,9223372036854775809,14576041771120212628,patrick,2020-01-01T00:00:00.000000000,patrick,3
+ 2020-01-02T00:00:00.000000000,9223372036854775810,4674756217206002200,karen,2020-01-02T00:00:00.000000000,karen,9
+ 2020-01-02T00:00:00.000000000,9223372036854775811,14576041771120212628,patrick,2020-01-02T00:00:00.000000000,patrick,5
+ 2020-01-03T00:00:00.000000000,9223372036854775812,4674756217206002200,karen,2020-01-03T00:00:00.000000000,karen,9
+ 2020-01-03T00:00:00.000000000,9223372036854775813,14576041771120212628,patrick,2020-01-03T00:00:00.000000000,patrick,12
+ 2020-01-04T00:00:00.000000000,9223372036854775814,14576041771120212628,patrick,2020-01-04T00:00:00.000000000,patrick,5000
+ 2020-01-04T00:00:00.000000000,9223372036854775815,4674756217206002200,karen,2020-01-04T00:00:00.000000000,karen,9
+ 2020-01-05T00:00:00.000000000,9223372036854775816,4674756217206002200,karen,2020-01-05T00:00:00.000000000,karen,9
+ 2020-01-05T00:00:00.000000000,9223372036854775817,14576041771120212628,patrick,2020-01-05T00:00:00.000000000,patrick,5000
+ 2020-01-06T00:00:00.000000000,9223372036854775808,14576041771120212628,patrick,2020-01-06T00:00:00.000000000,patrick,5000
+ 2020-01-06T00:00:00.000000000,9223372036854775809,6566809397636161383,spongebob,2020-01-06T00:00:00.000000000,spongebob,7
+ 2020-01-07T00:00:00.000000000,9223372036854775810,6566809397636161383,spongebob,2020-01-07T00:00:00.000000000,spongebob,34
+ 2020-01-08T00:00:00.000000000,9223372036854775811,4674756217206002200,karen,2020-01-08T00:00:00.000000000,karen,9
+ 2020-01-08T00:00:00.000000000,9223372036854775812,14576041771120212628,patrick,2020-01-08T00:00:00.000000000,patrick,5000
 "###);
 }
diff --git a/crates/sparrow-main/tests/e2e/prepare_tests.rs b/crates/sparrow-main/tests/e2e/prepare_tests.rs
index 7f9976185..3f3bc1979 100644
--- a/crates/sparrow-main/tests/e2e/prepare_tests.rs
+++ b/crates/sparrow-main/tests/e2e/prepare_tests.rs
@@ -27,12 +27,12 @@ async fn test_prepare_default_subsort_parquet() {
 insta::assert_snapshot!(QueryFixture::new("Numbers").run_to_csv(&data_fixture).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,key,m,n
- 1996-12-20T00:39:57.000000000,14798209437330752939,3650215962958587783,A,1996-12-20T00:39:57.000000000,A,5,10
- 1996-12-20T00:39:58.000000000,14798209437330752940,11753611437813598533,B,1996-12-20T00:39:58.000000000,B,24,3
- 1996-12-20T00:39:59.000000000,14798209437330752941,3650215962958587783,A,1996-12-20T00:39:59.000000000,A,17,6
- 1996-12-20T00:40:00.000000000,14798209437330752942,3650215962958587783,A,1996-12-20T00:40:00.000000000,A,,9
- 1996-12-20T00:40:01.000000000,14798209437330752943,3650215962958587783,A,1996-12-20T00:40:01.000000000,A,12,
- 1996-12-20T00:40:02.000000000,14798209437330752944,3650215962958587783,A,1996-12-20T00:40:02.000000000,A,,
+ 1996-12-20T00:39:57.000000000,14798209437330752939,12960666915911099378,A,1996-12-20T00:39:57.000000000,A,5,10
+ 1996-12-20T00:39:58.000000000,14798209437330752940,2867199309159137213,B,1996-12-20T00:39:58.000000000,B,24,3
+ 1996-12-20T00:39:59.000000000,14798209437330752941,12960666915911099378,A,1996-12-20T00:39:59.000000000,A,17,6
+ 1996-12-20T00:40:00.000000000,14798209437330752942,12960666915911099378,A,1996-12-20T00:40:00.000000000,A,,9
+ 1996-12-20T00:40:01.000000000,14798209437330752943,12960666915911099378,A,1996-12-20T00:40:01.000000000,A,12,
+ 1996-12-20T00:40:02.000000000,14798209437330752944,12960666915911099378,A,1996-12-20T00:40:02.000000000,A,,
 "###);
 }
@@ -64,12 +64,12 @@ async fn test_prepare_key_columns_parquet() {
 .unwrap();
 insta::assert_snapshot!(QueryFixture::new("Numbers").run_to_csv(&data_fixture).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,subsort,key,m,n
- 1996-12-20T00:39:57.000000000,9223372036854775809,3650215962958587783,A,1996-12-20T00:39:57.000000000,1,A,5,10
- 1996-12-20T00:39:58.000000000,9223372036854775810,11753611437813598533,B,1996-12-20T00:39:58.000000000,2,B,24,3
- 1996-12-20T00:39:59.000000000,9223372036854775811,3650215962958587783,A,1996-12-20T00:39:59.000000000,3,A,17,6
- 1996-12-20T00:40:00.000000000,9223372036854775812,3650215962958587783,A,1996-12-20T00:40:00.000000000,4,A,,9
- 1996-12-20T00:40:01.000000000,9223372036854775813,3650215962958587783,A,1996-12-20T00:40:01.000000000,5,A,12,
- 1996-12-20T00:40:02.000000000,9223372036854775814,3650215962958587783,A,1996-12-20T00:40:02.000000000,6,A,,
+ 1996-12-20T00:39:57.000000000,9223372036854775809,12960666915911099378,A,1996-12-20T00:39:57.000000000,1,A,5,10
+ 1996-12-20T00:39:58.000000000,9223372036854775810,2867199309159137213,B,1996-12-20T00:39:58.000000000,2,B,24,3
+ 1996-12-20T00:39:59.000000000,9223372036854775811,12960666915911099378,A,1996-12-20T00:39:59.000000000,3,A,17,6
+ 1996-12-20T00:40:00.000000000,9223372036854775812,12960666915911099378,A,1996-12-20T00:40:00.000000000,4,A,,9
+ 1996-12-20T00:40:01.000000000,9223372036854775813,12960666915911099378,A,1996-12-20T00:40:01.000000000,5,A,12,
+ 1996-12-20T00:40:02.000000000,9223372036854775814,12960666915911099378,A,1996-12-20T00:40:02.000000000,6,A,,
 "###);
 }
@@ -108,9 +108,9 @@ async fn test_u64_key() {
 .unwrap();
 insta::assert_snapshot!(QueryFixture::new("Events").run_to_csv(&data_fixture).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,not_a_key,user_id
- 1970-01-01T00:00:00.000001000,17227329910109684520,14253486467890685049,0,1970-01-01T00:00:00.000001000,r0,0
- 1970-01-01T00:00:00.000001001,17227329910109684521,2359047937476779835,1,1970-01-01T00:00:00.000001001,r1,1
- 1970-01-01T00:00:00.000001002,17227329910109684522,1575016611515860288,2,1970-01-01T00:00:00.000001002,,2
- 1970-01-01T00:00:00.000001003,17227329910109684523,11820145550582457114,4,1970-01-01T00:00:00.000001003,r4,4
+ 1970-01-01T00:00:00.000001000,17227329910109684520,11832085162654999889,0,1970-01-01T00:00:00.000001000,r0,0
+ 1970-01-01T00:00:00.000001001,17227329910109684521,18433805721903975440,1,1970-01-01T00:00:00.000001001,r1,1
+ 1970-01-01T00:00:00.000001002,17227329910109684522,2694864431690786590,2,1970-01-01T00:00:00.000001002,,2
+ 1970-01-01T00:00:00.000001003,17227329910109684523,17062639839782733832,4,1970-01-01T00:00:00.000001003,r4,4
 "###);
 }
diff --git a/crates/sparrow-main/tests/e2e/record_tests.rs b/crates/sparrow-main/tests/e2e/record_tests.rs
index a2c7bf51e..1de83c745 100644
--- a/crates/sparrow-main/tests/e2e/record_tests.rs
+++ b/crates/sparrow-main/tests/e2e/record_tests.rs
@@ -15,12 +15,12 @@ async fn test_record_creation() {
 let z = record.z in { n: record.x + record.n, y, z }").run_to_csv(&strings_data_fixture().await).await.unwrap(),
 @r###"
 _time,_subsort,_key_hash,_key,n,y,z
- 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,hello,hEllo
- 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,10,hello,World
- 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,3,hello,hello world
- 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,3,hello,
- 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,7,hello,
- 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,,hello,goodbye
+ 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,hello,hEllo
+ 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,10,hello,World
+ 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,3,hello,hello world
+ 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,3,hello,
+ 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,7,hello,
+ 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,,hello,goodbye
 "###);
 }
@@ -32,12 +32,12 @@ async fn test_record_extension() {
 let z = record.s in { n: record.x + record.n, y, z }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,n,y,z
- 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,hello,hEllo
- 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,10,hello,World
- 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,3,hello,hello world
- 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,3,hello,
- 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,7,hello,
- 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,,hello,goodbye
+ 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,hello,hEllo
+ 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,10,hello,World
+ 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,3,hello,hello world
+ 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,3,hello,
+ 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,7,hello,
+ 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,,hello,goodbye
 "###);
 }
@@ -68,12 +68,12 @@ async fn test_record_extension_error() {
 async fn test_record_extension_ordering() {
 insta::assert_snapshot!(QueryFixture::new("Strings | extend({ x: 5, y: \"hello\"})").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,x,y,time,subsort,key,s,n,t
- 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5,hello,1996-12-20T00:39:57.000000000,0,A,hEllo,0,hEllo
- 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,5,hello,1996-12-20T00:40:57.000000000,0,B,World,5,world
- 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,5,hello,1996-12-20T00:41:57.000000000,0,B,hello world,-2,hello world
- 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,5,hello,1996-12-20T00:42:57.000000000,0,B,,-2,greetings
- 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,5,hello,1996-12-20T00:43:57.000000000,0,B,,2,salutations
- 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,5,hello,1996-12-20T00:44:57.000000000,0,B,goodbye,,
+ 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5,hello,1996-12-20T00:39:57.000000000,0,A,hEllo,0,hEllo
+ 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,5,hello,1996-12-20T00:40:57.000000000,0,B,World,5,world
+ 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,5,hello,1996-12-20T00:41:57.000000000,0,B,hello world,-2,hello world
+ 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,5,hello,1996-12-20T00:42:57.000000000,0,B,,-2,greetings
+ 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,5,hello,1996-12-20T00:43:57.000000000,0,B,,2,salutations
+ 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,5,hello,1996-12-20T00:44:57.000000000,0,B,goodbye,,
 "###);
 }
@@ -81,12 +81,12 @@ async fn test_record_extension_ordering() {
 async fn test_record_removal() {
 insta::assert_snapshot!(QueryFixture::new("remove_fields(Strings, \"time\", \"subsort\")").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,key,s,n,t
- 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,A,hEllo,0,hEllo
- 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,B,World,5,world
- 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,B,hello world,-2,hello world
- 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,B,,-2,greetings
- 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,B,,2,salutations
- 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,B,goodbye,,
+ 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,A,hEllo,0,hEllo
+ 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,B,World,5,world
+ 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,B,hello world,-2,hello world
+ 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,B,,-2,greetings
+ 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,B,,2,salutations
+ 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,B,goodbye,,
 "###)
 }
@@ -94,12 +94,12 @@ async fn test_record_removal() {
 async fn test_record_removal_pipe() {
 insta::assert_snapshot!(QueryFixture::new("Strings | remove_fields($input, \"time\", \"subsort\")").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,key,s,n,t
- 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,A,hEllo,0,hEllo
- 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,B,World,5,world
- 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,B,hello world,-2,hello world
- 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,B,,-2,greetings
- 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,B,,2,salutations
- 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,B,goodbye,,
+ 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,A,hEllo,0,hEllo
+ 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,B,World,5,world
+ 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,B,hello world,-2,hello world
+ 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,B,,-2,greetings
+ 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,B,,2,salutations
+ 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,B,goodbye,,
 "###)
 }
@@ -107,12 +107,12 @@ async fn test_record_removal_pipe() {
 async fn test_record_select() {
 insta::assert_snapshot!(QueryFixture::new("select_fields(Strings, \"time\", \"s\")").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,s
- 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:57.000000000,hEllo
- 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:40:57.000000000,World
- 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:41:57.000000000,hello world
- 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:42:57.000000000,
- 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:43:57.000000000,
- 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:44:57.000000000,goodbye
+ 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:57.000000000,hEllo
+ 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:40:57.000000000,World
+ 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:41:57.000000000,hello world
+ 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:42:57.000000000,
+ 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:43:57.000000000,
+ 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:44:57.000000000,goodbye
 "###)
 }
@@ -133,10 +133,10 @@ async fn test_record_select_unused_key() {
 insta::assert_snapshot!(QueryFixture::new("select_fields(Input, 'a', 'b')").run_to_csv(&data_fixture).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,a,b
- 2021-01-01T00:00:00.000000000,4662198394987858659,3650215962958587783,A,5.0,1.2
- 2021-01-02T00:00:00.000000000,4662198394987858660,3650215962958587783,A,6.3,0.4
- 2021-03-01T00:00:00.000000000,4662198394987858661,11753611437813598533,B,,3.7
- 2021-04-10T00:00:00.000000000,4662198394987858662,3650215962958587783,A,13.0,
+ 2021-01-01T00:00:00.000000000,4662198394987858659,12960666915911099378,A,5.0,1.2
+ 2021-01-02T00:00:00.000000000,4662198394987858660,12960666915911099378,A,6.3,0.4
+ 2021-03-01T00:00:00.000000000,4662198394987858661,2867199309159137213,B,,3.7
+ 2021-04-10T00:00:00.000000000,4662198394987858662,12960666915911099378,A,13.0,
 "###)
 }
@@ -144,12 +144,12 @@ async fn test_record_select_unused_key() {
 async fn test_record_select_pipe() {
 insta::assert_snapshot!(QueryFixture::new("Strings | select_fields($input, \"time\", \"s\")").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,s
- 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:57.000000000,hEllo
- 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:40:57.000000000,World
- 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:41:57.000000000,hello world
- 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:42:57.000000000,
- 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:43:57.000000000,
- 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:44:57.000000000,goodbye
+ 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:57.000000000,hEllo
+ 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:40:57.000000000,World
+ 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:41:57.000000000,hello world
+ 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:42:57.000000000,
+ 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:43:57.000000000,
+ 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:44:57.000000000,goodbye
 "###)
 }
diff --git a/crates/sparrow-main/tests/e2e/resumeable_tests.rs b/crates/sparrow-main/tests/e2e/resumeable_tests.rs
index c9326b44e..0fa1aec33 100644
--- a/crates/sparrow-main/tests/e2e/resumeable_tests.rs
+++ b/crates/sparrow-main/tests/e2e/resumeable_tests.rs
@@ -117,8 +117,8 @@ async fn test_basic_resumeable_final() {
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,key,m,sum_m
- 1997-12-20T00:40:02.000000001,18446744073709551615,3650215962958587783,A,A,,34
- 1997-12-20T00:40:02.000000001,18446744073709551615,11753611437813598533,B,B,2,30
+ 1997-12-20T00:40:02.000000001,18446744073709551615,2867199309159137213,B,B,2,30
+ 1997-12-20T00:40:02.000000001,18446744073709551615,12960666915911099378,A,A,,34
 "###);
 }
@@ -158,8 +158,8 @@ async fn test_resumeable_entity_reordered() {
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,key,m,sum_m
- 1997-12-20T00:40:02.000000001,18446744073709551615,3650215962958587783,A,A,2,26
- 1997-12-20T00:40:02.000000001,18446744073709551615,11753611437813598533,B,B,2,38
+ 1997-12-20T00:40:02.000000001,18446744073709551615,2867199309159137213,B,B,2,38
+ 1997-12-20T00:40:02.000000001,18446744073709551615,12960666915911099378,A,A,2,26
 "###);
 }
@@ -238,8 +238,8 @@ async fn test_resumeable_with_unordered_file_sets() {
 insta::assert_snapshot!(persistent_results, @r###"
 _time,_subsort,_key_hash,_key,key,m,sum_m
- 1997-12-20T00:40:02.000000001,18446744073709551615,3650215962958587783,A,A,2,26
- 1997-12-20T00:40:02.000000001,18446744073709551615,11753611437813598533,B,B,2,38
+ 1997-12-20T00:40:02.000000001,18446744073709551615,2867199309159137213,B,B,2,38
+ 1997-12-20T00:40:02.000000001,18446744073709551615,12960666915911099378,A,A,2,26
 "###);
 }
@@ -297,11 +297,11 @@ async fn test_resumeable_ticks_old_entities() {
 // window.
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,key,m,hourly_count
- 1996-12-20T05:42:05.000000001,18446744073709551615,3650215962958587783,A,A,3,2
- 1996-12-20T05:42:05.000000001,18446744073709551615,9192031977313001967,C,C,,1
- 1996-12-20T05:42:05.000000001,18446744073709551615,11430173353997062025,D,D,2,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,11753611437813598533,B,B,,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,17976613645339558306,E,E,,1
+ 1996-12-20T05:42:05.000000001,18446744073709551615,1021973589662386405,D,D,2,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2521269998124177631,C,C,,1
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2867199309159137213,B,B,,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,12960666915911099378,A,A,3,2
+ 1996-12-20T05:42:05.000000001,18446744073709551615,16662102775856596107,E,E,,1
 "###);
 }
@@ -344,9 +344,9 @@ async fn test_resumeable_when() {
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,key,m,count
- 1996-12-20T05:42:05.000000001,18446744073709551615,3650215962958587783,A,A,3,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,11430173353997062025,D,D,2,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,11753611437813598533,B,B,2,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,1021973589662386405,D,D,2,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2867199309159137213,B,B,2,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,12960666915911099378,A,A,3,3
 "###);
 }
@@ -389,11 +389,11 @@ async fn test_resumeable_lookup() {
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,other_count,key,m,count
- 1996-12-20T05:42:05.000000001,18446744073709551615,3650215962958587783,A,3,A,3,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,9192031977313001967,C,2,C,8,1
- 1996-12-20T05:42:05.000000001,18446744073709551615,11430173353997062025,D,3,D,2,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,11753611437813598533,B,3,B,2,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,17976613645339558306,E,,E,5,1
+ 1996-12-20T05:42:05.000000001,18446744073709551615,1021973589662386405,D,3,D,2,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2521269998124177631,C,2,C,8,1
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2867199309159137213,B,3,B,2,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,12960666915911099378,A,3,A,3,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,16662102775856596107,E,,E,5,1
 "###);
 }
@@ -433,14 +433,14 @@ async fn test_resumeable_with_key() {
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,time,subsort,key,m,other
- 1996-12-20T05:42:05.000000001,18446744073709551615,650022633471272026,17,1996-12-20T02:39:59.000000000,0,B,17,A
- 1996-12-20T05:42:05.000000001,18446744073709551615,1575016611515860288,2,1996-12-20T05:41:04.000000000,0,D,2,B
- 1996-12-20T05:42:05.000000001,18446744073709551615,2359047937476779835,1,1996-12-20T03:41:00.000000000,0,A,1,C
- 1996-12-20T05:42:05.000000001,18446744073709551615,4864632034659211723,8,1996-12-20T03:40:00.000000000,0,C,8,B
- 1996-12-20T05:42:05.000000001,18446744073709551615,9175685813237050681,24,1996-12-20T02:39:58.000000000,0,B,24,B
- 1996-12-20T05:42:05.000000001,18446744073709551615,10021492687541564645,5,1996-12-20T02:39:57.000000000,0,A,5,E
- 1996-12-20T05:42:05.000000001,18446744073709551615,14956259290599888306,3,1996-12-20T05:42:05.000000000,0,A,3,D
- 1996-12-20T05:42:05.000000001,18446744073709551615,17018031324644251917,12,1996-12-20T04:40:01.000000000,0,D,12,A
+ 1996-12-20T05:42:05.000000001,18446744073709551615,322098188319043992,17,1996-12-20T02:39:59.000000000,0,B,17,A
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2287927947190353380,12,1996-12-20T04:40:01.000000000,0,D,12,A
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2694864431690786590,2,1996-12-20T05:41:04.000000000,0,D,2,B
+ 1996-12-20T05:42:05.000000001,18446744073709551615,5496774745203840792,3,1996-12-20T05:42:05.000000000,0,A,3,D
+ 1996-12-20T05:42:05.000000001,18446744073709551615,6794973171266502674,8,1996-12-20T03:40:00.000000000,0,C,8,B
+ 1996-12-20T05:42:05.000000001,18446744073709551615,11274228027825807126,24,1996-12-20T02:39:58.000000000,0,B,24,B
+ 1996-12-20T05:42:05.000000001,18446744073709551615,16461383214845928621,5,1996-12-20T02:39:57.000000000,0,A,5,E
+ 1996-12-20T05:42:05.000000001,18446744073709551615,18433805721903975440,1,1996-12-20T03:41:00.000000000,0,A,1,C
 "###);
 }
@@ -537,15 +537,16 @@ async fn test_resumeable_shift_to_column() {
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,shifted
- 1996-12-20T05:42:15.000000001,18446744073709551615,3650215962958587783,A,1996-12-20T05:42:05.000000000
- 1996-12-20T05:42:15.000000001,18446744073709551615,9192031977313001967,C,1996-12-20T03:40:00.000000000
- 1996-12-20T05:42:15.000000001,18446744073709551615,11430173353997062025,D,1996-12-20T05:41:04.000000000
- 1996-12-20T05:42:15.000000001,18446744073709551615,11753611437813598533,B,1996-12-20T04:40:02.000000000
- 1996-12-20T05:42:15.000000001,18446744073709551615,17976613645339558306,E,1996-12-20T02:38:57.000000000
+ 1996-12-20T05:42:15.000000001,18446744073709551615,1021973589662386405,D,1996-12-20T05:41:04.000000000
+ 1996-12-20T05:42:15.000000001,18446744073709551615,2521269998124177631,C,1996-12-20T03:40:00.000000000
+ 1996-12-20T05:42:15.000000001,18446744073709551615,2867199309159137213,B,1996-12-20T04:40:02.000000000
+ 1996-12-20T05:42:15.000000001,18446744073709551615,12960666915911099378,A,1996-12-20T05:42:05.000000000
+ 1996-12-20T05:42:15.000000001,18446744073709551615,16662102775856596107,E,1996-12-20T02:38:57.000000000
 "###);
 }
 #[tokio::test]
+#[ignore = "resumeable not implemented for lag"]
 async fn test_resumeable_lag_basic() {
 // Test for resuming a query with a lag operator.
 let query_fixture =
@@ -582,11 +583,11 @@ async fn test_resumeable_lag_basic() {
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,lag_1_m,lag_2_n
- 1996-12-20T05:42:05.000000001,18446744073709551615,3650215962958587783,A,1,10
- 1996-12-20T05:42:05.000000001,18446744073709551615,9192031977313001967,C,,
- 1996-12-20T05:42:05.000000001,18446744073709551615,11430173353997062025,D,2,
- 1996-12-20T05:42:05.000000001,18446744073709551615,11753611437813598533,B,17,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,17976613645339558306,E,,
+ 1996-12-20T05:42:05.000000001,18446744073709551615,1021973589662386405,D,2,
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2521269998124177631,C,,
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2867199309159137213,B,17,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,12960666915911099378,A,1,
+ 1996-12-20T05:42:05.000000001,18446744073709551615,16662102775856596107,E,,
 "###);
 }
@@ -655,10 +656,10 @@ async fn test_resumeable_final_no_new_data() {
 insta::assert_snapshot!(result1, @r###"
 _time,_subsort,_key_hash,_key,other_count,key,m,count
- 1996-12-20T03:41:00.000000001,18446744073709551615,3650215962958587783,A,1,A,1,2
- 1996-12-20T03:41:00.000000001,18446744073709551615,9192031977313001967,C,2,C,8,1
- 1996-12-20T03:41:00.000000001,18446744073709551615,11753611437813598533,B,1,B,17,2
- 1996-12-20T03:41:00.000000001,18446744073709551615,17976613645339558306,E,,E,5,1
+ 1996-12-20T03:41:00.000000001,18446744073709551615,2521269998124177631,C,2,C,8,1
+ 1996-12-20T03:41:00.000000001,18446744073709551615,2867199309159137213,B,1,B,17,2
+ 1996-12-20T03:41:00.000000001,18446744073709551615,12960666915911099378,A,1,A,1,2
+ 1996-12-20T03:41:00.000000001,18446744073709551615,16662102775856596107,E,,E,5,1
 "###);
 // Run the query again
@@ -669,10 +670,10 @@ async fn test_resumeable_final_no_new_data() {
 .unwrap();
 insta::assert_snapshot!(result3, @r###"
 _time,_subsort,_key_hash,_key,other_count,key,m,count
- 1996-12-20T03:41:00.000000001,18446744073709551615,3650215962958587783,A,1,A,1,2
- 1996-12-20T03:41:00.000000001,18446744073709551615,9192031977313001967,C,2,C,8,1
- 1996-12-20T03:41:00.000000001,18446744073709551615,11753611437813598533,B,1,B,17,2
- 1996-12-20T03:41:00.000000001,18446744073709551615,17976613645339558306,E,,E,5,1
+ 1996-12-20T03:41:00.000000001,18446744073709551615,2521269998124177631,C,2,C,8,1
+ 1996-12-20T03:41:00.000000001,18446744073709551615,2867199309159137213,B,1,B,17,2
+ 1996-12-20T03:41:00.000000001,18446744073709551615,12960666915911099378,A,1,A,1,2
+ 1996-12-20T03:41:00.000000001,18446744073709551615,16662102775856596107,E,,E,5,1
 "###);
 }
@@ -720,11 +721,11 @@ async fn test_resumeable_with_preview_rows() {
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,other_count,key,m,count
- 1996-12-20T05:42:05.000000001,18446744073709551615,3650215962958587783,A,3,A,3,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,9192031977313001967,C,2,C,8,1
- 1996-12-20T05:42:05.000000001,18446744073709551615,11430173353997062025,D,3,D,2,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,11753611437813598533,B,3,B,2,3
- 1996-12-20T05:42:05.000000001,18446744073709551615,17976613645339558306,E,,E,5,1
+ 1996-12-20T05:42:05.000000001,18446744073709551615,1021973589662386405,D,3,D,2,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2521269998124177631,C,2,C,8,1
+ 1996-12-20T05:42:05.000000001,18446744073709551615,2867199309159137213,B,3,B,2,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,12960666915911099378,A,3,A,3,3
+ 1996-12-20T05:42:05.000000001,18446744073709551615,16662102775856596107,E,,E,5,1
 "###);
 }
@@ -761,8 +762,8 @@ async fn test_shift_until() {
 insta::assert_snapshot!(result, @r###"
 _time,_subsort,_key_hash,_key,key
- 1997-12-20T00:40:02.000000001,18446744073709551615,3650215962958587783,A,A
- 1997-12-20T00:40:02.000000001,18446744073709551615,11753611437813598533,B,B
+ 1997-12-20T00:40:02.000000001,18446744073709551615,2867199309159137213,B,B
+ 1997-12-20T00:40:02.000000001,18446744073709551615,12960666915911099378,A,A
 "###);
 }
diff --git a/crates/sparrow-main/tests/e2e/shift_tests.rs b/crates/sparrow-main/tests/e2e/shift_tests.rs
index 34ebda4bd..764ab404e 100644
--- a/crates/sparrow-main/tests/e2e/shift_tests.rs
+++ b/crates/sparrow-main/tests/e2e/shift_tests.rs
@@ -38,12 +38,12 @@ async fn shift_data_fixture() -> DataFixture {
 async fn test_shift_by_months() {
 insta::assert_snapshot!(QueryFixture::new("{ i64: ShiftFixture.i64 | shift_by(months(5)) }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,i64
- 1997-05-20T00:39:57.000000000,0,3650215962958587783,A,57
- 1997-05-20T00:39:58.000000000,1,11753611437813598533,B,58
- 1997-05-20T00:39:59.000000000,2,3650215962958587783,A,59
- 1997-05-20T00:40:00.000000000,3,11753611437813598533,B,
- 1997-05-20T00:40:01.000000000,4,3650215962958587783,A,
- 1997-05-20T00:40:02.000000000,5,3650215962958587783,A,2
+ 1997-05-20T00:39:57.000000000,0,12960666915911099378,A,57
+ 1997-05-20T00:39:58.000000000,1,2867199309159137213,B,58
+ 1997-05-20T00:39:59.000000000,2,12960666915911099378,A,59
+ 1997-05-20T00:40:00.000000000,3,2867199309159137213,B,
+ 1997-05-20T00:40:01.000000000,4,12960666915911099378,A,
+ 1997-05-20T00:40:02.000000000,5,12960666915911099378,A,2
 "###)
 }
@@ -51,12 +51,12 @@ async fn test_shift_by_months() {
 async fn test_shift_by_seconds() {
 insta::assert_snapshot!(QueryFixture::new("{ i64: ShiftFixture.i64 | shift_by(seconds(5)) }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,i64
- 1996-12-20T00:40:02.000000000,0,3650215962958587783,A,57
- 1996-12-20T00:40:03.000000000,1,11753611437813598533,B,58
- 1996-12-20T00:40:04.000000000,2,3650215962958587783,A,59
- 1996-12-20T00:40:05.000000000,3,11753611437813598533,B,
- 1996-12-20T00:40:06.000000000,4,3650215962958587783,A,
- 1996-12-20T00:40:07.000000000,5,3650215962958587783,A,2
+ 1996-12-20T00:40:02.000000000,0,12960666915911099378,A,57
+ 1996-12-20T00:40:03.000000000,1,2867199309159137213,B,58
+ 1996-12-20T00:40:04.000000000,2,12960666915911099378,A,59
+ 1996-12-20T00:40:05.000000000,3,2867199309159137213,B,
+ 1996-12-20T00:40:06.000000000,4,12960666915911099378,A,
+ 1996-12-20T00:40:07.000000000,5,12960666915911099378,A,2
 "###)
 }
@@ -64,12 +64,12 @@ async fn test_shift_by_seconds() {
 async fn test_shift_to_plus_seconds() {
 insta::assert_snapshot!(QueryFixture::new("{ i64: ShiftFixture.i64 | shift_to(add_time(seconds(5), time_of($input))) }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,i64
- 1996-12-20T00:40:02.000000000,0,3650215962958587783,A,57
- 1996-12-20T00:40:03.000000000,1,11753611437813598533,B,58
- 1996-12-20T00:40:04.000000000,2,3650215962958587783,A,59
- 1996-12-20T00:40:05.000000000,3,11753611437813598533,B,
- 1996-12-20T00:40:06.000000000,4,3650215962958587783,A,
- 1996-12-20T00:40:07.000000000,5,3650215962958587783,A,2
+ 1996-12-20T00:40:02.000000000,0,12960666915911099378,A,57
+ 1996-12-20T00:40:03.000000000,1,2867199309159137213,B,58
+ 1996-12-20T00:40:04.000000000,2,12960666915911099378,A,59
+ 1996-12-20T00:40:05.000000000,3,2867199309159137213,B,
+ 1996-12-20T00:40:06.000000000,4,12960666915911099378,A,
+ 1996-12-20T00:40:07.000000000,5,12960666915911099378,A,2
 "###)
 }
@@ -77,12 +77,12 @@ async fn test_shift_to_plus_seconds() {
 async fn test_shift_until_data_i64() {
 insta::assert_snapshot!(QueryFixture::new("{ i64: ShiftFixture.i64 | shift_until(ShiftFixture.cond) }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,i64
- 1996-12-20T00:39:57.000000000,0,3650215962958587783,A,57
- 1996-12-20T00:40:00.000000000,1,11753611437813598533,B,58
- 1996-12-20T00:40:00.000000000,2,11753611437813598533,B,
- 1996-12-20T00:40:02.000000000,3,3650215962958587783,A,59
- 1996-12-20T00:40:02.000000000,4,3650215962958587783,A,
- 1996-12-20T00:40:02.000000000,5,3650215962958587783,A,2
+ 1996-12-20T00:39:57.000000000,0,12960666915911099378,A,57
+ 1996-12-20T00:40:00.000000000,1,2867199309159137213,B,58
+ 1996-12-20T00:40:00.000000000,2,2867199309159137213,B,
+ 1996-12-20T00:40:02.000000000,3,12960666915911099378,A,59
+ 1996-12-20T00:40:02.000000000,4,12960666915911099378,A,
+ 1996-12-20T00:40:02.000000000,5,12960666915911099378,A,2
 "###)
 }
@@ -90,12 +90,12 @@ async fn test_shift_until_data_i64() {
 async fn test_shift_until_data_boolean() {
 insta::assert_snapshot!(QueryFixture::new("{ bool: ShiftFixture.bool | shift_until(ShiftFixture.cond) }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,bool
- 1996-12-20T00:39:57.000000000,0,3650215962958587783,A,false
- 1996-12-20T00:40:00.000000000,1,11753611437813598533,B,true
- 1996-12-20T00:40:00.000000000,2,11753611437813598533,B,
- 1996-12-20T00:40:02.000000000,3,3650215962958587783,A,true
- 1996-12-20T00:40:02.000000000,4,3650215962958587783,A,
- 1996-12-20T00:40:02.000000000,5,3650215962958587783,A,
+ 1996-12-20T00:39:57.000000000,0,12960666915911099378,A,false
+ 1996-12-20T00:40:00.000000000,1,2867199309159137213,B,true
+ 1996-12-20T00:40:00.000000000,2,2867199309159137213,B,
+ 1996-12-20T00:40:02.000000000,3,12960666915911099378,A,true
+ 1996-12-20T00:40:02.000000000,4,12960666915911099378,A,
+ 1996-12-20T00:40:02.000000000,5,12960666915911099378,A,
 "###)
 }
@@ -103,12 +103,12 @@ async fn test_shift_until_data_boolean() {
 async fn test_shift_until_data_string() {
 insta::assert_snapshot!(QueryFixture::new("{ string: ShiftFixture.string | shift_until(ShiftFixture.cond) }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,string
- 1996-12-20T00:39:57.000000000,0,3650215962958587783,A,hello
- 1996-12-20T00:40:00.000000000,1,11753611437813598533,B,world
- 1996-12-20T00:40:00.000000000,2,11753611437813598533,B,
- 1996-12-20T00:40:02.000000000,3,3650215962958587783,A,world
- 1996-12-20T00:40:02.000000000,4,3650215962958587783,A,
- 1996-12-20T00:40:02.000000000,5,3650215962958587783,A,hello
+ 1996-12-20T00:39:57.000000000,0,12960666915911099378,A,hello
+ 1996-12-20T00:40:00.000000000,1,2867199309159137213,B,world
+ 1996-12-20T00:40:00.000000000,2,2867199309159137213,B,
+ 1996-12-20T00:40:02.000000000,3,12960666915911099378,A,world
+ 1996-12-20T00:40:02.000000000,4,12960666915911099378,A,
+ 1996-12-20T00:40:02.000000000,5,12960666915911099378,A,hello
 "###)
 }
@@ -116,12 +116,12 @@ async fn test_shift_until_data_record() {
 insta::assert_snapshot!(QueryFixture::new("ShiftFixture | shift_until($input.cond)").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,subsort,key,cond,bool,i64,string,other_time
- 1996-12-20T00:39:57.000000000,0,3650215962958587783,A,1996-12-20T00:39:57.000000000,0,A,true,false,57,hello,1997-12-20T00:39:57.000000000
- 1996-12-20T00:40:00.000000000,1,11753611437813598533,B,1996-12-20T00:39:58.000000000,0,B,false,true,58,world,1997-10-20T00:39:57.000000000
- 1996-12-20T00:40:00.000000000,2,11753611437813598533,B,1996-12-20T00:40:00.000000000,0,B,true,,,,2000-12-20T00:39:57.000000000
- 1996-12-20T00:40:02.000000000,3,3650215962958587783,A,1996-12-20T00:39:59.000000000,0,A,,true,59,world,1995-12-20T00:39:57.000000000
- 1996-12-20T00:40:02.000000000,4,3650215962958587783,A,1996-12-20T00:40:01.000000000,0,A,false,,,,
- 1996-12-20T00:40:02.000000000,5,3650215962958587783,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello,1999-01-20T00:39:57.000000000
+ 1996-12-20T00:39:57.000000000,0,12960666915911099378,A,1996-12-20T00:39:57.000000000,0,A,true,false,57,hello,1997-12-20T00:39:57.000000000
+ 1996-12-20T00:40:00.000000000,1,2867199309159137213,B,1996-12-20T00:39:58.000000000,0,B,false,true,58,world,1997-10-20T00:39:57.000000000
+ 1996-12-20T00:40:00.000000000,2,2867199309159137213,B,1996-12-20T00:40:00.000000000,0,B,true,,,,2000-12-20T00:39:57.000000000
+ 1996-12-20T00:40:02.000000000,3,12960666915911099378,A,1996-12-20T00:39:59.000000000,0,A,,true,59,world,1995-12-20T00:39:57.000000000
+ 1996-12-20T00:40:02.000000000,4,12960666915911099378,A,1996-12-20T00:40:01.000000000,0,A,false,,,,
+ 1996-12-20T00:40:02.000000000,5,12960666915911099378,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello,1999-01-20T00:39:57.000000000
 "###)
 }
@@ -177,10 +177,10 @@ async fn test_shift_to_literal_record() {
 async fn test_shift_to_data_i64() {
 insta::assert_snapshot!(QueryFixture::new("{ i64: ShiftFixture.i64 | shift_to(ShiftFixture.other_time) }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,i64
- 1997-10-20T00:39:57.000000000,0,11753611437813598533,B,58
- 1997-12-20T00:39:57.000000000,1,3650215962958587783,A,57
- 1999-01-20T00:39:57.000000000,2,3650215962958587783,A,2
- 2000-12-20T00:39:57.000000000,3,11753611437813598533,B,
+ 1997-10-20T00:39:57.000000000,0,2867199309159137213,B,58
+ 1997-12-20T00:39:57.000000000,1,12960666915911099378,A,57
+ 1999-01-20T00:39:57.000000000,2,12960666915911099378,A,2
+ 2000-12-20T00:39:57.000000000,3,2867199309159137213,B,
 "###)
 }
@@ -188,10 +188,10 @@ async fn test_shift_to_data_i64() {
 async fn test_shift_to_data_boolean() {
 insta::assert_snapshot!(QueryFixture::new("{ bool: ShiftFixture.bool | shift_to(ShiftFixture.other_time) }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,bool
- 1997-10-20T00:39:57.000000000,0,11753611437813598533,B,true
- 1997-12-20T00:39:57.000000000,1,3650215962958587783,A,false
- 1999-01-20T00:39:57.000000000,2,3650215962958587783,A,
- 2000-12-20T00:39:57.000000000,3,11753611437813598533,B,
+ 1997-10-20T00:39:57.000000000,0,2867199309159137213,B,true
+ 1997-12-20T00:39:57.000000000,1,12960666915911099378,A,false
+ 1999-01-20T00:39:57.000000000,2,12960666915911099378,A,
+ 2000-12-20T00:39:57.000000000,3,2867199309159137213,B,
 "###)
 }
@@ -199,10 +199,10 @@ async fn test_shift_to_data_boolean() {
 async fn test_shift_to_data_string() {
 insta::assert_snapshot!(QueryFixture::new("{ string: ShiftFixture.string | shift_to(ShiftFixture.other_time) }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,string
- 1997-10-20T00:39:57.000000000,0,11753611437813598533,B,world
- 1997-12-20T00:39:57.000000000,1,3650215962958587783,A,hello
- 1999-01-20T00:39:57.000000000,2,3650215962958587783,A,hello
- 2000-12-20T00:39:57.000000000,3,11753611437813598533,B,
+ 1997-10-20T00:39:57.000000000,0,2867199309159137213,B,world
+ 1997-12-20T00:39:57.000000000,1,12960666915911099378,A,hello
+ 1999-01-20T00:39:57.000000000,2,12960666915911099378,A,hello
+ 2000-12-20T00:39:57.000000000,3,2867199309159137213,B,
 "###)
 }
@@ -210,10 +210,10 @@ async fn test_shift_to_data_string() {
 async fn test_shift_to_data_record() {
 insta::assert_snapshot!(QueryFixture::new("ShiftFixture | shift_to(ShiftFixture.other_time)").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,subsort,key,cond,bool,i64,string,other_time
- 1997-10-20T00:39:57.000000000,0,11753611437813598533,B,1996-12-20T00:39:58.000000000,0,B,false,true,58,world,1997-10-20T00:39:57.000000000
- 1997-12-20T00:39:57.000000000,1,3650215962958587783,A,1996-12-20T00:39:57.000000000,0,A,true,false,57,hello,1997-12-20T00:39:57.000000000
- 1999-01-20T00:39:57.000000000,2,3650215962958587783,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello,1999-01-20T00:39:57.000000000
- 2000-12-20T00:39:57.000000000,3,11753611437813598533,B,1996-12-20T00:40:00.000000000,0,B,true,,,,2000-12-20T00:39:57.000000000
+ 1997-10-20T00:39:57.000000000,0,2867199309159137213,B,1996-12-20T00:39:58.000000000,0,B,false,true,58,world,1997-10-20T00:39:57.000000000
+ 1997-12-20T00:39:57.000000000,1,12960666915911099378,A,1996-12-20T00:39:57.000000000,0,A,true,false,57,hello,1997-12-20T00:39:57.000000000
+ 1999-01-20T00:39:57.000000000,2,12960666915911099378,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello,1999-01-20T00:39:57.000000000
+ 2000-12-20T00:39:57.000000000,3,2867199309159137213,B,1996-12-20T00:40:00.000000000,0,B,true,,,,2000-12-20T00:39:57.000000000
 "###)
 }
@@ -227,9 +227,9 @@ async fn test_shift_until_false() {
 let shift_until_gt_75 = ShiftFixture.string | shift_until(gt_75) in { gt_10, shift_until_gt_10, gt_75, shift_until_gt_75 } | when(gt_10 or gt_75)").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,gt_10,shift_until_gt_10,gt_75,shift_until_gt_75
- 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,,false,
- 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,true,,false,
- 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,true,,false,
+ 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,,false,
+ 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,true,,false,
+ 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,true,,false,
 "###)
 }
@@ -243,15 +243,15 @@ async fn test_shift_until_false_sum() {
 let shift_until_gt_75 = ShiftFixture.string | shift_until(gt_75) in { gt_10, shift_until_gt_10, gt_75, shift_until_gt_75 }").run_to_csv(&shift_data_fixture().await).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,gt_10,shift_until_gt_10,gt_75,shift_until_gt_75
- 1996-12-20T00:39:57.000000000,0,3650215962958587783,A,,57,,
- 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,57,false,
- 1996-12-20T00:39:58.000000000,1,11753611437813598533,B,,58,,
- 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,true,58,false,
- 1996-12-20T00:39:59.000000000,2,3650215962958587783,A,,116,,
- 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,true,116,false,
- 1996-12-20T00:40:00.000000000,9223372036854775808,11753611437813598533,B,,58,,
- 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,,116,,
- 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,false,116,false,
+ 1996-12-20T00:39:57.000000000,0,12960666915911099378,A,,57,,
+ 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,57,false,
+ 1996-12-20T00:39:58.000000000,1,2867199309159137213,B,,58,,
+ 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,true,58,false,
+ 1996-12-20T00:39:59.000000000,2,12960666915911099378,A,,116,,
+ 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,true,116,false,
+ 1996-12-20T00:40:00.000000000,9223372036854775808,2867199309159137213,B,,58,,
+ 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,,116,,
+ 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,false,116,false,
 "###)
 }
@@ -289,12 +289,12 @@ async fn test_shift_to_sparse() {
 insta::assert_snapshot!(QueryFixture::new("{ result: ShiftFixture.n | shift_to(ShiftFixture.date) }").run_to_csv(&data).await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,result
- 1996-06-22T00:00:00.000000000,0,13736678384813893675,Ryan,4
- 1996-07-20T00:00:00.000000000,1,13736678384813893675,Ryan,2
- 1996-07-22T00:00:00.000000000,0,13736678384813893675,Ryan,3
- 1996-07-22T00:00:00.000000000,1,12688524802574118068,Ben,5
- 1996-08-19T00:00:00.000000000,2,12688524802574118068,Ben,1
- 1996-08-22T00:00:00.000000000,0,12688524802574118068,Ben,6
+ 1996-06-22T00:00:00.000000000,0,4840222152338143374,Ryan,4
+ 1996-07-20T00:00:00.000000000,1,4840222152338143374,Ryan,2
+ 1996-07-22T00:00:00.000000000,0,4840222152338143374,Ryan,3
+ 1996-07-22T00:00:00.000000000,1,16858319356675959811,Ben,5
+ 1996-08-19T00:00:00.000000000,2,16858319356675959811,Ben,1
+ 1996-08-22T00:00:00.000000000,0,16858319356675959811,Ben,6
 "###);
 }
@@ -332,9 +332,9 @@ async fn shift_to_and_lookup() {
 .await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,sub_sort,id,v
- 2000-01-11T00:00:00.000000000,0,7636293598395510443,a,2000-01-01T00:00:00.000000000,0,a,111
- 2000-01-11T01:00:00.000000000,1,5899024403724905519,c,2000-01-01T01:00:00.000000000,1,c,333
- 2000-01-12T00:00:00.000000000,2,2637710838665036908,b,2000-01-02T00:00:00.000000000,2,b,222
+ 2000-01-11T00:00:00.000000000,0,13074916891489937275,a,2000-01-01T00:00:00.000000000,0,a,111
+ 2000-01-11T01:00:00.000000000,1,298518813902531243,c,2000-01-01T01:00:00.000000000,1,c,333
+ 2000-01-12T00:00:00.000000000,2,12352002978215245678,b,2000-01-02T00:00:00.000000000,2,b,222
 "###);
 }
@@ -358,10 +358,10 @@ async fn shift_to() {
 .await.unwrap(), @r###"
 _time,_subsort,_key_hash,_key,time,key,sub_sort,date,n
- 1996-07-20T00:00:00.000000000,0,13736678384813893675,Ryan,1996-04-21T00:00:00.000000000,Ryan,1,1996-07-20T00:00:00.000000000,2
- 1996-07-22T00:00:00.000000000,0,13736678384813893675,Ryan,1996-05-21T00:00:00.000000000,Ryan,2,1996-07-22T00:00:00.000000000,3
- 1996-07-22T00:00:00.000000000,1,12688524802574118068,Ben,1996-07-21T00:00:00.000000000,Ben,4,1996-07-22T00:00:00.000000000,5
- 1996-08-19T00:00:00.000000000,2,12688524802574118068,Ben,1996-03-21T00:00:00.000000000,Ben,0,1996-08-19T00:00:00.000000000,1
-
1996-08-22T00:00:00.000000000,0,12688524802574118068,Ben,1996-08-21T00:00:00.000000000,Ben,5,1996-08-22T00:00:00.000000000,6 + 1996-07-20T00:00:00.000000000,0,4840222152338143374,Ryan,1996-04-21T00:00:00.000000000,Ryan,1,1996-07-20T00:00:00.000000000,2 + 1996-07-22T00:00:00.000000000,0,4840222152338143374,Ryan,1996-05-21T00:00:00.000000000,Ryan,2,1996-07-22T00:00:00.000000000,3 + 1996-07-22T00:00:00.000000000,1,16858319356675959811,Ben,1996-07-21T00:00:00.000000000,Ben,4,1996-07-22T00:00:00.000000000,5 + 1996-08-19T00:00:00.000000000,2,16858319356675959811,Ben,1996-03-21T00:00:00.000000000,Ben,0,1996-08-19T00:00:00.000000000,1 + 1996-08-22T00:00:00.000000000,0,16858319356675959811,Ben,1996-08-21T00:00:00.000000000,Ben,5,1996-08-22T00:00:00.000000000,6 "###); } diff --git a/crates/sparrow-main/tests/e2e/string_tests.rs b/crates/sparrow-main/tests/e2e/string_tests.rs index b9c4d711e..598de0d3f 100644 --- a/crates/sparrow-main/tests/e2e/string_tests.rs +++ b/crates/sparrow-main/tests/e2e/string_tests.rs @@ -7,12 +7,12 @@ use crate::QueryFixture; async fn test_len() { insta::assert_snapshot!(QueryFixture::new("{ len: len(Strings.s)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,len - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,5 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,11 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,0 - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,0 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,7 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,5 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,11 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,0 + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,0 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,7 "###); } @@ -20,12 +20,12 @@ async fn test_len() { async fn test_upper_len() { insta::assert_snapshot!(QueryFixture::new("{ upper: upper(Strings.s)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,upper - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,HELLO - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,WORLD - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,HELLO WORLD - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,GOODBYE + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,HELLO + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,WORLD + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,HELLO WORLD + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,GOODBYE "###); } @@ -33,12 +33,12 @@ async fn test_upper_len() { async fn test_lower_len() { insta::assert_snapshot!(QueryFixture::new("{ lower: 
lower(Strings.s)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,lower - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hello - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,world - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,hello world - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,goodbye + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hello + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,world + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,hello world + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,goodbye "###); } @@ -50,11 +50,11 @@ async fn test_substring() { , substring_i: substring(Strings.s, start=Strings.n), }").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,substring_0_2,substring_1,substring_0_i,substring_i - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hE,Ello,,hEllo - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,Wo,orld,World, - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,he,ello world,hello wor,ld - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,,,, - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,,,, - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,go,oodbye,goodbye,goodbye + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hE,Ello,,hEllo + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,Wo,orld,World, + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,he,ello world,hello wor,ld + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,,,, + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,,,, + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,go,oodbye,goodbye,goodbye "###); } diff --git a/crates/sparrow-main/tests/e2e/tick_tests.rs b/crates/sparrow-main/tests/e2e/tick_tests.rs index f94305ed2..bed3a2819 100644 --- a/crates/sparrow-main/tests/e2e/tick_tests.rs +++ b/crates/sparrow-main/tests/e2e/tick_tests.rs @@ -214,14 +214,14 @@ async fn data_fixture_over_years() -> DataFixture { async fn test_time_of_produces_discrete_values() { insta::assert_snapshot!(QueryFixture::new("{ t: Foo.n | last() | time_of() } | when(hourly())").run_to_csv(&data_fixture_over_hours_end_on_hour().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t - 1996-12-20T08:00:00.000000000,18446744073709551615,3650215962958587783,A, - 1996-12-20T08:00:00.000000000,18446744073709551615,11753611437813598533,B, - 1996-12-20T09:00:00.000000000,18446744073709551615,3650215962958587783,A, - 1996-12-20T09:00:00.000000000,18446744073709551615,11753611437813598533,B, - 1996-12-20T10:00:00.000000000,18446744073709551615,3650215962958587783,A, - 1996-12-20T10:00:00.000000000,18446744073709551615,11753611437813598533,B, - 1996-12-20T11:00:00.000000000,18446744073709551615,3650215962958587783,A, - 
1996-12-20T11:00:00.000000000,18446744073709551615,11753611437813598533,B, + 1996-12-20T08:00:00.000000000,18446744073709551615,2867199309159137213,B, + 1996-12-20T08:00:00.000000000,18446744073709551615,12960666915911099378,A, + 1996-12-20T09:00:00.000000000,18446744073709551615,2867199309159137213,B, + 1996-12-20T09:00:00.000000000,18446744073709551615,12960666915911099378,A, + 1996-12-20T10:00:00.000000000,18446744073709551615,2867199309159137213,B, + 1996-12-20T10:00:00.000000000,18446744073709551615,12960666915911099378,A, + 1996-12-20T11:00:00.000000000,18446744073709551615,2867199309159137213,B, + 1996-12-20T11:00:00.000000000,18446744073709551615,12960666915911099378,A, "###); } @@ -229,14 +229,14 @@ async fn test_time_of_produces_discrete_values() { async fn test_time_of_to_last_produces_continuous_values() { insta::assert_snapshot!(QueryFixture::new("{ t: Foo.n | time_of() | last() } | when(hourly())").run_to_csv(&data_fixture_over_hours_end_on_hour().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,t - 1996-12-20T08:00:00.000000000,18446744073709551615,3650215962958587783,A,1996-12-20T08:00:00.000000000 - 1996-12-20T08:00:00.000000000,18446744073709551615,11753611437813598533,B,1996-12-20T07:39:58.000000000 - 1996-12-20T09:00:00.000000000,18446744073709551615,3650215962958587783,A,1996-12-20T08:45:01.000000000 - 1996-12-20T09:00:00.000000000,18446744073709551615,11753611437813598533,B,1996-12-20T07:39:58.000000000 - 1996-12-20T10:00:00.000000000,18446744073709551615,3650215962958587783,A,1996-12-20T09:20:02.000000000 - 1996-12-20T10:00:00.000000000,18446744073709551615,11753611437813598533,B,1996-12-20T09:25:02.000000000 - 1996-12-20T11:00:00.000000000,18446744073709551615,3650215962958587783,A,1996-12-20T11:00:00.000000000 - 1996-12-20T11:00:00.000000000,18446744073709551615,11753611437813598533,B,1996-12-20T09:25:02.000000000 + 1996-12-20T08:00:00.000000000,18446744073709551615,2867199309159137213,B,1996-12-20T07:39:58.000000000 + 1996-12-20T08:00:00.000000000,18446744073709551615,12960666915911099378,A,1996-12-20T08:00:00.000000000 + 1996-12-20T09:00:00.000000000,18446744073709551615,2867199309159137213,B,1996-12-20T07:39:58.000000000 + 1996-12-20T09:00:00.000000000,18446744073709551615,12960666915911099378,A,1996-12-20T08:45:01.000000000 + 1996-12-20T10:00:00.000000000,18446744073709551615,2867199309159137213,B,1996-12-20T09:25:02.000000000 + 1996-12-20T10:00:00.000000000,18446744073709551615,12960666915911099378,A,1996-12-20T09:20:02.000000000 + 1996-12-20T11:00:00.000000000,18446744073709551615,2867199309159137213,B,1996-12-20T09:25:02.000000000 + 1996-12-20T11:00:00.000000000,18446744073709551615,12960666915911099378,A,1996-12-20T11:00:00.000000000 "###); } @@ -244,14 +244,14 @@ async fn test_time_of_to_last_produces_continuous_values() { async fn test_tick_with_discrete_values() { insta::assert_snapshot!(QueryFixture::new("Foo | when(hourly())").run_to_csv(&data_fixture_over_hours_end_on_hour().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,n,vegetable,bool - 1996-12-20T08:00:00.000000000,18446744073709551615,3650215962958587783,A,,,,,, - 1996-12-20T08:00:00.000000000,18446744073709551615,11753611437813598533,B,,,,,, - 1996-12-20T09:00:00.000000000,18446744073709551615,3650215962958587783,A,,,,,, - 1996-12-20T09:00:00.000000000,18446744073709551615,11753611437813598533,B,,,,,, - 1996-12-20T10:00:00.000000000,18446744073709551615,3650215962958587783,A,,,,,, - 
1996-12-20T10:00:00.000000000,18446744073709551615,11753611437813598533,B,,,,,, - 1996-12-20T11:00:00.000000000,18446744073709551615,3650215962958587783,A,,,,,, - 1996-12-20T11:00:00.000000000,18446744073709551615,11753611437813598533,B,,,,,, + 1996-12-20T08:00:00.000000000,18446744073709551615,2867199309159137213,B,,,,,, + 1996-12-20T08:00:00.000000000,18446744073709551615,12960666915911099378,A,,,,,, + 1996-12-20T09:00:00.000000000,18446744073709551615,2867199309159137213,B,,,,,, + 1996-12-20T09:00:00.000000000,18446744073709551615,12960666915911099378,A,,,,,, + 1996-12-20T10:00:00.000000000,18446744073709551615,2867199309159137213,B,,,,,, + 1996-12-20T10:00:00.000000000,18446744073709551615,12960666915911099378,A,,,,,, + 1996-12-20T11:00:00.000000000,18446744073709551615,2867199309159137213,B,,,,,, + 1996-12-20T11:00:00.000000000,18446744073709551615,12960666915911099378,A,,,,,, "###); } @@ -259,14 +259,14 @@ async fn test_tick_with_discrete_values() { async fn test_since_tick_when_tick() { insta::assert_snapshot!(QueryFixture::new("{ when: count(Foo, window=since(hourly())) } | when(hourly())").run_to_csv(&data_fixture_over_hours_end_on_hour().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,when - 1996-12-20T08:00:00.000000000,18446744073709551615,3650215962958587783,A,2 - 1996-12-20T08:00:00.000000000,18446744073709551615,11753611437813598533,B,1 - 1996-12-20T09:00:00.000000000,18446744073709551615,3650215962958587783,A,2 - 1996-12-20T09:00:00.000000000,18446744073709551615,11753611437813598533,B,0 - 1996-12-20T10:00:00.000000000,18446744073709551615,3650215962958587783,A,1 - 1996-12-20T10:00:00.000000000,18446744073709551615,11753611437813598533,B,1 - 1996-12-20T11:00:00.000000000,18446744073709551615,3650215962958587783,A,2 - 1996-12-20T11:00:00.000000000,18446744073709551615,11753611437813598533,B,0 + 1996-12-20T08:00:00.000000000,18446744073709551615,2867199309159137213,B,1 + 1996-12-20T08:00:00.000000000,18446744073709551615,12960666915911099378,A,2 + 1996-12-20T09:00:00.000000000,18446744073709551615,2867199309159137213,B,0 + 1996-12-20T09:00:00.000000000,18446744073709551615,12960666915911099378,A,2 + 1996-12-20T10:00:00.000000000,18446744073709551615,2867199309159137213,B,1 + 1996-12-20T10:00:00.000000000,18446744073709551615,12960666915911099378,A,1 + 1996-12-20T11:00:00.000000000,18446744073709551615,2867199309159137213,B,0 + 1996-12-20T11:00:00.000000000,18446744073709551615,12960666915911099378,A,2 "###); } @@ -274,20 +274,20 @@ async fn test_since_tick_when_tick() { async fn test_since_minutely() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, minutely: count(Foo, window=since(minutely())) }").run_to_csv(&data_fixture_over_minutes().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,minutely - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,1 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,3 - 1996-12-20T00:40:00.000000000,18446744073709551615,3650215962958587783,A,,3 - 1996-12-20T00:40:00.000000000,18446744073709551615,11753611437813598533,B,,1 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0,1 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0,2 - 1996-12-20T00:40:02.000000000,9223372036854775808,11753611437813598533,B,8.0,1 - 
1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,3 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,4 - 1996-12-20T00:41:00.000000000,18446744073709551615,3650215962958587783,A,,4 - 1996-12-20T00:41:00.000000000,18446744073709551615,11753611437813598533,B,,1 - 1996-12-20T00:41:04.000000000,9223372036854775808,3650215962958587783,A,10.0,1 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,1 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,3 + 1996-12-20T00:40:00.000000000,18446744073709551615,2867199309159137213,B,,1 + 1996-12-20T00:40:00.000000000,18446744073709551615,12960666915911099378,A,,3 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0,1 + 1996-12-20T00:40:02.000000000,9223372036854775808,2867199309159137213,B,8.0,1 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0,2 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,3 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,4 + 1996-12-20T00:41:00.000000000,18446744073709551615,2867199309159137213,B,,1 + 1996-12-20T00:41:00.000000000,18446744073709551615,12960666915911099378,A,,4 + 1996-12-20T00:41:04.000000000,9223372036854775808,12960666915911099378,A,10.0,1 "###); } @@ -295,23 +295,23 @@ async fn test_since_minutely() { async fn test_if_hourly() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, n_if_hourly: Foo.n | if(hourly()) }").run_to_csv(&data_fixture_over_hours_end_on_hour().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,n_if_hourly - 1996-12-20T07:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0, - 1996-12-20T07:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9, - 1996-12-20T08:00:00.000000000,9223372036854775808,3650215962958587783,A,6.2, - 1996-12-20T08:00:00.000000000,18446744073709551615,3650215962958587783,A,, - 1996-12-20T08:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1996-12-20T08:44:00.000000000,9223372036854775808,3650215962958587783,A,9.25, - 1996-12-20T08:45:01.000000000,9223372036854775808,3650215962958587783,A,3.0, - 1996-12-20T09:00:00.000000000,18446744073709551615,3650215962958587783,A,, - 1996-12-20T09:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1996-12-20T09:20:02.000000000,9223372036854775808,3650215962958587783,A,8.0, - 1996-12-20T09:25:02.000000000,9223372036854775808,11753611437813598533,B,23.9, - 1996-12-20T10:00:00.000000000,18446744073709551615,3650215962958587783,A,, - 1996-12-20T10:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1996-12-20T10:30:03.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T11:00:00.000000000,9223372036854775808,3650215962958587783,A,10.0, - 1996-12-20T11:00:00.000000000,18446744073709551615,3650215962958587783,A,, - 1996-12-20T11:00:00.000000000,18446744073709551615,11753611437813598533,B,, + 1996-12-20T07:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0, + 1996-12-20T07:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9, + 1996-12-20T08:00:00.000000000,9223372036854775808,12960666915911099378,A,6.2, + 1996-12-20T08:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 1996-12-20T08:00:00.000000000,18446744073709551615,12960666915911099378,A,, 
+ 1996-12-20T08:44:00.000000000,9223372036854775808,12960666915911099378,A,9.25,
+ 1996-12-20T08:45:01.000000000,9223372036854775808,12960666915911099378,A,3.0,
+ 1996-12-20T09:00:00.000000000,18446744073709551615,2867199309159137213,B,,
+ 1996-12-20T09:00:00.000000000,18446744073709551615,12960666915911099378,A,,
+ 1996-12-20T09:20:02.000000000,9223372036854775808,12960666915911099378,A,8.0,
+ 1996-12-20T09:25:02.000000000,9223372036854775808,2867199309159137213,B,23.9,
+ 1996-12-20T10:00:00.000000000,18446744073709551615,2867199309159137213,B,,
+ 1996-12-20T10:00:00.000000000,18446744073709551615,12960666915911099378,A,,
+ 1996-12-20T10:30:03.000000000,9223372036854775808,12960666915911099378,A,,
+ 1996-12-20T11:00:00.000000000,9223372036854775808,12960666915911099378,A,10.0,
+ 1996-12-20T11:00:00.000000000,18446744073709551615,2867199309159137213,B,,
+ 1996-12-20T11:00:00.000000000,18446744073709551615,12960666915911099378,A,,
"###);
}
@@ -319,22 +319,22 @@ async fn test_if_hourly() {
async fn test_daily_else() {
insta::assert_snapshot!(QueryFixture::new("{ sum_since: sum(Foo.n, window=since(daily())) | else(0) }").run_to_csv(&data_days_for_else().await).await.unwrap(), @r###"
_time,_subsort,_key_hash,_key,sum_since
- 1996-12-19T20:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0
- 1996-12-19T20:39:58.000000000,9223372036854775808,11753611437813598533,B,0.0
- 1996-12-20T00:00:00.000000000,18446744073709551615,3650215962958587783,A,10.0
- 1996-12-20T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0.0
- 1996-12-21T00:00:00.000000000,18446744073709551615,3650215962958587783,A,0.0
- 1996-12-21T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0.0
- 1996-12-21T00:32:59.000000000,9223372036854775808,3650215962958587783,A,6.2
- 1996-12-21T00:44:00.000000000,9223372036854775808,3650215962958587783,A,15.45
- 1996-12-21T00:45:01.000000000,9223372036854775808,3650215962958587783,A,18.45
- 1996-12-21T08:00:00.000000000,9223372036854775808,3650215962958587783,A,26.45
- 1996-12-22T00:00:00.000000000,18446744073709551615,3650215962958587783,A,26.45
- 1996-12-22T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0.0
- 1996-12-22T00:30:03.000000000,9223372036854775808,3650215962958587783,A,0.0
- 1996-12-23T00:00:00.000000000,18446744073709551615,3650215962958587783,A,0.0
- 1996-12-23T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0.0
- 1996-12-23T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0
+ 1996-12-19T20:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0
+ 1996-12-19T20:39:58.000000000,9223372036854775808,2867199309159137213,B,0.0
+ 1996-12-20T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0.0
+ 1996-12-20T00:00:00.000000000,18446744073709551615,12960666915911099378,A,10.0
+ 1996-12-21T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0.0
+ 1996-12-21T00:00:00.000000000,18446744073709551615,12960666915911099378,A,0.0
+ 1996-12-21T00:32:59.000000000,9223372036854775808,12960666915911099378,A,6.2
+ 1996-12-21T00:44:00.000000000,9223372036854775808,12960666915911099378,A,15.45
+ 1996-12-21T00:45:01.000000000,9223372036854775808,12960666915911099378,A,18.45
+ 1996-12-21T08:00:00.000000000,9223372036854775808,12960666915911099378,A,26.45
+ 1996-12-22T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0.0
+ 1996-12-22T00:00:00.000000000,18446744073709551615,12960666915911099378,A,26.45
+ 1996-12-22T00:30:03.000000000,9223372036854775808,12960666915911099378,A,0.0
+ 1996-12-23T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0.0
+ 1996-12-23T00:00:00.000000000,18446744073709551615,12960666915911099378,A,0.0
+ 1996-12-23T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0
"###);
}
@@ -342,22 +342,22 @@ async fn test_daily_else() {
async fn test_daily_else_to_last() {
insta::assert_snapshot!(QueryFixture::new("{ sum_since: sum(Foo.n, window=since(daily())) | else(0) | last() }").run_to_csv(&data_days_for_else().await).await.unwrap(), @r###"
_time,_subsort,_key_hash,_key,sum_since
- 1996-12-19T20:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0
- 1996-12-19T20:39:58.000000000,9223372036854775808,11753611437813598533,B,0.0
- 1996-12-20T00:00:00.000000000,18446744073709551615,3650215962958587783,A,10.0
- 1996-12-20T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0.0
- 1996-12-21T00:00:00.000000000,18446744073709551615,3650215962958587783,A,0.0
- 1996-12-21T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0.0
- 1996-12-21T00:32:59.000000000,9223372036854775808,3650215962958587783,A,6.2
- 1996-12-21T00:44:00.000000000,9223372036854775808,3650215962958587783,A,15.45
- 1996-12-21T00:45:01.000000000,9223372036854775808,3650215962958587783,A,18.45
- 1996-12-21T08:00:00.000000000,9223372036854775808,3650215962958587783,A,26.45
- 1996-12-22T00:00:00.000000000,18446744073709551615,3650215962958587783,A,26.45
- 1996-12-22T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0.0
- 1996-12-22T00:30:03.000000000,9223372036854775808,3650215962958587783,A,0.0
- 1996-12-23T00:00:00.000000000,18446744073709551615,3650215962958587783,A,0.0
- 1996-12-23T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0.0
- 1996-12-23T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0
+ 1996-12-19T20:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0
+ 1996-12-19T20:39:58.000000000,9223372036854775808,2867199309159137213,B,0.0
+ 1996-12-20T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0.0
+ 1996-12-20T00:00:00.000000000,18446744073709551615,12960666915911099378,A,10.0
+ 1996-12-21T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0.0
+ 1996-12-21T00:00:00.000000000,18446744073709551615,12960666915911099378,A,0.0
+ 1996-12-21T00:32:59.000000000,9223372036854775808,12960666915911099378,A,6.2
+ 1996-12-21T00:44:00.000000000,9223372036854775808,12960666915911099378,A,15.45
+ 1996-12-21T00:45:01.000000000,9223372036854775808,12960666915911099378,A,18.45
+ 1996-12-21T08:00:00.000000000,9223372036854775808,12960666915911099378,A,26.45
+ 1996-12-22T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0.0
+ 1996-12-22T00:00:00.000000000,18446744073709551615,12960666915911099378,A,26.45
+ 1996-12-22T00:30:03.000000000,9223372036854775808,12960666915911099378,A,0.0
+ 1996-12-23T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0.0
+ 1996-12-23T00:00:00.000000000,18446744073709551615,12960666915911099378,A,0.0
+ 1996-12-23T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0
"###);
}
@@ -365,23 +365,23 @@ async fn test_daily_else_to_last() {
async fn test_since_daily() {
insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, sum_since: sum(Foo.n, window=since(daily())) }").run_to_csv(&data_fixture_over_days().await).await.unwrap(), @r###"
_time,_subsort,_key_hash,_key,n,sum_since
- 1996-12-19T20:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0
- 1996-12-19T20:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,3.9
- 1996-12-20T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,10.0
- 1996-12-20T00:00:00.000000000,18446744073709551615,11753611437813598533,B,,3.9
- 1996-12-21T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,
- 1996-12-21T00:00:00.000000000,18446744073709551615,11753611437813598533,B,,
- 1996-12-21T00:32:59.000000000,9223372036854775808,3650215962958587783,A,6.2,6.2
- 1996-12-21T00:44:00.000000000,9223372036854775808,3650215962958587783,A,9.25,15.45
- 1996-12-21T00:45:01.000000000,9223372036854775808,3650215962958587783,A,3.0,18.45
- 1996-12-21T08:00:00.000000000,9223372036854775808,3650215962958587783,A,8.0,26.45
- 1996-12-22T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,26.45
- 1996-12-22T00:00:00.000000000,18446744073709551615,11753611437813598533,B,,
- 1996-12-22T00:25:02.000000000,9223372036854775808,11753611437813598533,B,23.9,23.9
- 1996-12-22T00:30:03.000000000,9223372036854775808,3650215962958587783,A,,
- 1996-12-23T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,
- 1996-12-23T00:00:00.000000000,18446744073709551615,11753611437813598533,B,,23.9
- 1996-12-23T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0
+ 1996-12-19T20:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0
+ 1996-12-19T20:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,3.9
+ 1996-12-20T00:00:00.000000000,18446744073709551615,2867199309159137213,B,,3.9
+ 1996-12-20T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,10.0
+ 1996-12-21T00:00:00.000000000,18446744073709551615,2867199309159137213,B,,
+ 1996-12-21T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,
+ 1996-12-21T00:32:59.000000000,9223372036854775808,12960666915911099378,A,6.2,6.2
+ 1996-12-21T00:44:00.000000000,9223372036854775808,12960666915911099378,A,9.25,15.45
+ 1996-12-21T00:45:01.000000000,9223372036854775808,12960666915911099378,A,3.0,18.45
+ 1996-12-21T08:00:00.000000000,9223372036854775808,12960666915911099378,A,8.0,26.45
+ 1996-12-22T00:00:00.000000000,18446744073709551615,2867199309159137213,B,,
+ 1996-12-22T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,26.45
+ 1996-12-22T00:25:02.000000000,9223372036854775808,2867199309159137213,B,23.9,23.9
+ 1996-12-22T00:30:03.000000000,9223372036854775808,12960666915911099378,A,,
+ 1996-12-23T00:00:00.000000000,18446744073709551615,2867199309159137213,B,,23.9
+ 1996-12-23T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,
+ 1996-12-23T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0
"###);
}
@@ -389,23 +389,23 @@ async fn test_since_daily() {
async fn test_since_hourly() {
insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, sum_since: sum(Foo.n, window=since(hourly())) }").run_to_csv(&data_fixture_over_hours().await).await.unwrap(), @r###"
_time,_subsort,_key_hash,_key,n,sum_since
- 1996-12-20T07:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0
- 1996-12-20T07:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,3.9
- 1996-12-20T08:00:00.000000000,9223372036854775808,3650215962958587783,A,6.2,16.2
- 1996-12-20T08:00:00.000000000,18446744073709551615,3650215962958587783,A,,16.2
- 1996-12-20T08:00:00.000000000,18446744073709551615,11753611437813598533,B,,3.9
- 1996-12-20T08:44:00.000000000,9223372036854775808,3650215962958587783,A,9.25,9.25
- 1996-12-20T08:45:01.000000000,9223372036854775808,3650215962958587783,A,3.0,12.25
- 1996-12-20T09:00:00.000000000,18446744073709551615,3650215962958587783,A,,12.25
- 1996-12-20T09:00:00.000000000,18446744073709551615,11753611437813598533,B,,
- 1996-12-20T09:20:02.000000000,9223372036854775808,3650215962958587783,A,8.0,8.0
- 1996-12-20T09:25:02.000000000,9223372036854775808,11753611437813598533,B,23.9,23.9
- 1996-12-20T10:00:00.000000000,18446744073709551615,3650215962958587783,A,,8.0
- 1996-12-20T10:00:00.000000000,18446744073709551615,11753611437813598533,B,,23.9
- 1996-12-20T10:30:03.000000000,9223372036854775808,3650215962958587783,A,,
- 1996-12-20T11:00:00.000000000,18446744073709551615,3650215962958587783,A,,
- 1996-12-20T11:00:00.000000000,18446744073709551615,11753611437813598533,B,,
- 1996-12-20T11:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0
+ 1996-12-20T07:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0
+ 1996-12-20T07:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,3.9
+ 1996-12-20T08:00:00.000000000,9223372036854775808,12960666915911099378,A,6.2,16.2
+ 1996-12-20T08:00:00.000000000,18446744073709551615,2867199309159137213,B,,3.9
+ 1996-12-20T08:00:00.000000000,18446744073709551615,12960666915911099378,A,,16.2
+ 1996-12-20T08:44:00.000000000,9223372036854775808,12960666915911099378,A,9.25,9.25
+ 1996-12-20T08:45:01.000000000,9223372036854775808,12960666915911099378,A,3.0,12.25
+ 1996-12-20T09:00:00.000000000,18446744073709551615,2867199309159137213,B,,
+ 1996-12-20T09:00:00.000000000,18446744073709551615,12960666915911099378,A,,12.25
+ 1996-12-20T09:20:02.000000000,9223372036854775808,12960666915911099378,A,8.0,8.0
+ 1996-12-20T09:25:02.000000000,9223372036854775808,2867199309159137213,B,23.9,23.9
+ 1996-12-20T10:00:00.000000000,18446744073709551615,2867199309159137213,B,,23.9
+ 1996-12-20T10:00:00.000000000,18446744073709551615,12960666915911099378,A,,8.0
+ 1996-12-20T10:30:03.000000000,9223372036854775808,12960666915911099378,A,,
+ 1996-12-20T11:00:00.000000000,18446744073709551615,2867199309159137213,B,,
+ 1996-12-20T11:00:00.000000000,18446744073709551615,12960666915911099378,A,,
+ 1996-12-20T11:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0
"###);
}
@@ -413,23 +413,23 @@ async fn test_since_hourly() {
async fn test_since_hourly_end_on_hour() {
insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, sum_since: sum(Foo.n, window=since(hourly())) }").run_to_csv(&data_fixture_over_hours_end_on_hour().await).await.unwrap(), @r###"
_time,_subsort,_key_hash,_key,n,sum_since
- 1996-12-20T07:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0
- 1996-12-20T07:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,3.9
- 1996-12-20T08:00:00.000000000,9223372036854775808,3650215962958587783,A,6.2,16.2
- 1996-12-20T08:00:00.000000000,18446744073709551615,3650215962958587783,A,,16.2
- 1996-12-20T08:00:00.000000000,18446744073709551615,11753611437813598533,B,,3.9
- 1996-12-20T08:44:00.000000000,9223372036854775808,3650215962958587783,A,9.25,9.25
- 1996-12-20T08:45:01.000000000,9223372036854775808,3650215962958587783,A,3.0,12.25
- 1996-12-20T09:00:00.000000000,18446744073709551615,3650215962958587783,A,,12.25
- 1996-12-20T09:00:00.000000000,18446744073709551615,11753611437813598533,B,,
- 1996-12-20T09:20:02.000000000,9223372036854775808,3650215962958587783,A,8.0,8.0
- 1996-12-20T09:25:02.000000000,9223372036854775808,11753611437813598533,B,23.9,23.9
- 1996-12-20T10:00:00.000000000,18446744073709551615,3650215962958587783,A,,8.0
- 1996-12-20T10:00:00.000000000,18446744073709551615,11753611437813598533,B,,23.9
- 1996-12-20T10:30:03.000000000,9223372036854775808,3650215962958587783,A,,
- 1996-12-20T11:00:00.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0
- 1996-12-20T11:00:00.000000000,18446744073709551615,3650215962958587783,A,,10.0
- 1996-12-20T11:00:00.000000000,18446744073709551615,11753611437813598533,B,,
+ 1996-12-20T07:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0
+ 1996-12-20T07:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,3.9
+ 1996-12-20T08:00:00.000000000,9223372036854775808,12960666915911099378,A,6.2,16.2
+ 1996-12-20T08:00:00.000000000,18446744073709551615,2867199309159137213,B,,3.9
+ 1996-12-20T08:00:00.000000000,18446744073709551615,12960666915911099378,A,,16.2
+ 1996-12-20T08:44:00.000000000,9223372036854775808,12960666915911099378,A,9.25,9.25
+ 1996-12-20T08:45:01.000000000,9223372036854775808,12960666915911099378,A,3.0,12.25
+ 1996-12-20T09:00:00.000000000,18446744073709551615,2867199309159137213,B,,
+ 1996-12-20T09:00:00.000000000,18446744073709551615,12960666915911099378,A,,12.25
+ 1996-12-20T09:20:02.000000000,9223372036854775808,12960666915911099378,A,8.0,8.0
+ 1996-12-20T09:25:02.000000000,9223372036854775808,2867199309159137213,B,23.9,23.9
+ 1996-12-20T10:00:00.000000000,18446744073709551615,2867199309159137213,B,,23.9
+ 1996-12-20T10:00:00.000000000,18446744073709551615,12960666915911099378,A,,8.0
+ 1996-12-20T10:30:03.000000000,9223372036854775808,12960666915911099378,A,,
+ 1996-12-20T11:00:00.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0
+ 1996-12-20T11:00:00.000000000,18446744073709551615,2867199309159137213,B,,
+ 1996-12-20T11:00:00.000000000,18446744073709551615,12960666915911099378,A,,10.0
"###);
}
@@ -437,14 +437,14 @@ async fn test_since_hourly_end_on_hour() {
async fn test_when_hourly_end_on_hour() {
insta::assert_snapshot!(QueryFixture::new("{ sum_when_tick: sum(Foo.n) | when(hourly()) }").run_to_csv(&data_fixture_over_hours_end_on_hour().await).await.unwrap(), @r###"
_time,_subsort,_key_hash,_key,sum_when_tick
- 1996-12-20T08:00:00.000000000,18446744073709551615,3650215962958587783,A,16.2
- 1996-12-20T08:00:00.000000000,18446744073709551615,11753611437813598533,B,3.9
- 1996-12-20T09:00:00.000000000,18446744073709551615,3650215962958587783,A,28.45
- 1996-12-20T09:00:00.000000000,18446744073709551615,11753611437813598533,B,3.9
- 1996-12-20T10:00:00.000000000,18446744073709551615,3650215962958587783,A,36.45
- 1996-12-20T10:00:00.000000000,18446744073709551615,11753611437813598533,B,27.799999999999997
- 1996-12-20T11:00:00.000000000,18446744073709551615,3650215962958587783,A,46.45
- 1996-12-20T11:00:00.000000000,18446744073709551615,11753611437813598533,B,27.799999999999997
+ 1996-12-20T08:00:00.000000000,18446744073709551615,2867199309159137213,B,3.9
+ 1996-12-20T08:00:00.000000000,18446744073709551615,12960666915911099378,A,16.2
+ 1996-12-20T09:00:00.000000000,18446744073709551615,2867199309159137213,B,3.9
+ 1996-12-20T09:00:00.000000000,18446744073709551615,12960666915911099378,A,28.45
+ 1996-12-20T10:00:00.000000000,18446744073709551615,2867199309159137213,B,27.799999999999997
+ 1996-12-20T10:00:00.000000000,18446744073709551615,12960666915911099378,A,36.45
+ 1996-12-20T11:00:00.000000000,18446744073709551615,2867199309159137213,B,27.799999999999997
+ 1996-12-20T11:00:00.000000000,18446744073709551615,12960666915911099378,A,46.45
"###); } @@ -452,8 +452,8 @@ async fn test_when_hourly_end_on_hour() { async fn test_when_hourly_end_on_hour_final_results() { insta::assert_snapshot!(QueryFixture::new("{ sum_on_hour: sum(Foo.n) | when(hourly()) }").with_final_results().run_to_csv(&data_fixture_over_hours_end_on_hour().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_on_hour - 1996-12-20T11:00:00.000000001,18446744073709551615,3650215962958587783,A,46.45 - 1996-12-20T11:00:00.000000001,18446744073709551615,11753611437813598533,B,27.799999999999997 + 1996-12-20T11:00:00.000000001,18446744073709551615,2867199309159137213,B,27.799999999999997 + 1996-12-20T11:00:00.000000001,18446744073709551615,12960666915911099378,A,46.45 "###); } @@ -462,23 +462,23 @@ async fn test_when_hourly_end_on_hour_final_results() { async fn test_since_daily_over_span_of_days() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, sum_since: sum(Foo.n, window=since(daily())) }").run_to_csv(&data_fixture_over_days().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,sum_since - 1996-12-19T20:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0 - 1996-12-19T20:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,3.9 - 1996-12-20T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,10.0 - 1996-12-20T00:00:00.000000000,18446744073709551615,11753611437813598533,B,,3.9 - 1996-12-21T00:00:00.000000000,18446744073709551615,3650215962958587783,A,, - 1996-12-21T00:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1996-12-21T00:32:59.000000000,9223372036854775808,3650215962958587783,A,6.2,6.2 - 1996-12-21T00:44:00.000000000,9223372036854775808,3650215962958587783,A,9.25,15.45 - 1996-12-21T00:45:01.000000000,9223372036854775808,3650215962958587783,A,3.0,18.45 - 1996-12-21T08:00:00.000000000,9223372036854775808,3650215962958587783,A,8.0,26.45 - 1996-12-22T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,26.45 - 1996-12-22T00:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1996-12-22T00:25:02.000000000,9223372036854775808,11753611437813598533,B,23.9,23.9 - 1996-12-22T00:30:03.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-23T00:00:00.000000000,18446744073709551615,3650215962958587783,A,, - 1996-12-23T00:00:00.000000000,18446744073709551615,11753611437813598533,B,,23.9 - 1996-12-23T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0 + 1996-12-19T20:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0 + 1996-12-19T20:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,3.9 + 1996-12-20T00:00:00.000000000,18446744073709551615,2867199309159137213,B,,3.9 + 1996-12-20T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,10.0 + 1996-12-21T00:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 1996-12-21T00:00:00.000000000,18446744073709551615,12960666915911099378,A,, + 1996-12-21T00:32:59.000000000,9223372036854775808,12960666915911099378,A,6.2,6.2 + 1996-12-21T00:44:00.000000000,9223372036854775808,12960666915911099378,A,9.25,15.45 + 1996-12-21T00:45:01.000000000,9223372036854775808,12960666915911099378,A,3.0,18.45 + 1996-12-21T08:00:00.000000000,9223372036854775808,12960666915911099378,A,8.0,26.45 + 1996-12-22T00:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 1996-12-22T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,26.45 + 1996-12-22T00:25:02.000000000,9223372036854775808,2867199309159137213,B,23.9,23.9 + 
1996-12-22T00:30:03.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-23T00:00:00.000000000,18446744073709551615,2867199309159137213,B,,23.9 + 1996-12-23T00:00:00.000000000,18446744073709551615,12960666915911099378,A,, + 1996-12-23T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0 "###); } @@ -486,25 +486,25 @@ async fn test_since_daily_over_span_of_days() { async fn test_since_monthly() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, sum_since: sum(Foo.n, window=since(monthly())) }").run_to_csv(&data_fixture_over_months().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,sum_since - 1996-12-19T20:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0 - 1996-12-19T20:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,3.9 - 1997-01-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,10.0 - 1997-01-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,,3.9 - 1997-01-21T00:32:59.000000000,9223372036854775808,3650215962958587783,A,6.2,6.2 - 1997-01-21T00:44:00.000000000,9223372036854775808,3650215962958587783,A,9.25,15.45 - 1997-02-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,15.45 - 1997-02-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1997-02-21T00:45:01.000000000,9223372036854775808,3650215962958587783,A,3.0,3.0 - 1997-03-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,3.0 - 1997-03-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1997-04-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,, - 1997-04-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1997-04-22T00:20:02.000000000,9223372036854775808,3650215962958587783,A,8.0,8.0 - 1997-04-22T00:25:02.000000000,9223372036854775808,11753611437813598533,B,23.9,23.9 - 1997-04-22T00:30:03.000000000,9223372036854775808,3650215962958587783,A,,8.0 - 1997-05-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,8.0 - 1997-05-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,,23.9 - 1997-05-23T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0 + 1996-12-19T20:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0 + 1996-12-19T20:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,3.9 + 1997-01-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,,3.9 + 1997-01-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,10.0 + 1997-01-21T00:32:59.000000000,9223372036854775808,12960666915911099378,A,6.2,6.2 + 1997-01-21T00:44:00.000000000,9223372036854775808,12960666915911099378,A,9.25,15.45 + 1997-02-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 1997-02-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,15.45 + 1997-02-21T00:45:01.000000000,9223372036854775808,12960666915911099378,A,3.0,3.0 + 1997-03-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 1997-03-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,3.0 + 1997-04-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 1997-04-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,, + 1997-04-22T00:20:02.000000000,9223372036854775808,12960666915911099378,A,8.0,8.0 + 1997-04-22T00:25:02.000000000,9223372036854775808,2867199309159137213,B,23.9,23.9 + 1997-04-22T00:30:03.000000000,9223372036854775808,12960666915911099378,A,,8.0 + 
1997-05-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,,23.9 + 1997-05-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,8.0 + 1997-05-23T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0 "###); } @@ -512,25 +512,25 @@ async fn test_since_monthly() { async fn test_since_yearly() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, sum_since: sum(Foo.n, window=since(yearly())) }").run_to_csv(&data_fixture_over_years().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,sum_since - 1996-12-19T20:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0 - 1996-12-19T20:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,3.9 - 1997-01-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,10.0 - 1997-01-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,,3.9 - 1998-01-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,, - 1998-01-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1998-01-21T00:32:59.000000000,9223372036854775808,3650215962958587783,A,6.2,6.2 - 1998-01-21T00:44:00.000000000,9223372036854775808,3650215962958587783,A,9.25,15.45 - 1999-01-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,15.45 - 1999-01-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 1999-02-21T00:45:01.000000000,9223372036854775808,3650215962958587783,A,3.0,3.0 - 1999-04-22T00:20:02.000000000,9223372036854775808,3650215962958587783,A,8.0,11.0 - 2000-01-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,,11.0 - 2000-01-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 2001-01-01T00:00:00.000000000,18446744073709551615,3650215962958587783,A,, - 2001-01-01T00:00:00.000000000,18446744073709551615,11753611437813598533,B,, - 2001-04-22T00:25:02.000000000,9223372036854775808,11753611437813598533,B,23.9,23.9 - 2001-04-22T00:30:03.000000000,9223372036854775808,3650215962958587783,A,, - 2001-05-23T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0 + 1996-12-19T20:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0 + 1996-12-19T20:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,3.9 + 1997-01-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,,3.9 + 1997-01-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,10.0 + 1998-01-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 1998-01-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,, + 1998-01-21T00:32:59.000000000,9223372036854775808,12960666915911099378,A,6.2,6.2 + 1998-01-21T00:44:00.000000000,9223372036854775808,12960666915911099378,A,9.25,15.45 + 1999-01-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 1999-01-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,15.45 + 1999-02-21T00:45:01.000000000,9223372036854775808,12960666915911099378,A,3.0,3.0 + 1999-04-22T00:20:02.000000000,9223372036854775808,12960666915911099378,A,8.0,11.0 + 2000-01-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 2000-01-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,,11.0 + 2001-01-01T00:00:00.000000000,18446744073709551615,2867199309159137213,B,, + 2001-01-01T00:00:00.000000000,18446744073709551615,12960666915911099378,A,, + 2001-04-22T00:25:02.000000000,9223372036854775808,2867199309159137213,B,23.9,23.9 + 2001-04-22T00:30:03.000000000,9223372036854775808,12960666915911099378,A,, + 
2001-05-23T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0 "###); } @@ -538,14 +538,14 @@ async fn test_since_yearly() { async fn test_tick_with_when_produces_values_on_window_bounds() { insta::assert_snapshot!(QueryFixture::new("{ sum_when_day: sum(Foo.n, window=since(daily())) | when(daily()) }").run_to_csv(&data_fixture_over_days().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum_when_day - 1996-12-20T00:00:00.000000000,18446744073709551615,3650215962958587783,A,10.0 - 1996-12-20T00:00:00.000000000,18446744073709551615,11753611437813598533,B,3.9 - 1996-12-21T00:00:00.000000000,18446744073709551615,3650215962958587783,A, - 1996-12-21T00:00:00.000000000,18446744073709551615,11753611437813598533,B, - 1996-12-22T00:00:00.000000000,18446744073709551615,3650215962958587783,A,26.45 - 1996-12-22T00:00:00.000000000,18446744073709551615,11753611437813598533,B, - 1996-12-23T00:00:00.000000000,18446744073709551615,3650215962958587783,A, - 1996-12-23T00:00:00.000000000,18446744073709551615,11753611437813598533,B,23.9 + 1996-12-20T00:00:00.000000000,18446744073709551615,2867199309159137213,B,3.9 + 1996-12-20T00:00:00.000000000,18446744073709551615,12960666915911099378,A,10.0 + 1996-12-21T00:00:00.000000000,18446744073709551615,2867199309159137213,B, + 1996-12-21T00:00:00.000000000,18446744073709551615,12960666915911099378,A, + 1996-12-22T00:00:00.000000000,18446744073709551615,2867199309159137213,B, + 1996-12-22T00:00:00.000000000,18446744073709551615,12960666915911099378,A,26.45 + 1996-12-23T00:00:00.000000000,18446744073709551615,2867199309159137213,B,23.9 + 1996-12-23T00:00:00.000000000,18446744073709551615,12960666915911099378,A, "###); } @@ -557,8 +557,8 @@ async fn test_tick_when_finished() { // "finished", but we expect to get the most recent (new) value. 
insta::assert_snapshot!(QueryFixture::new("{ time: Numbers.time, sum: sum(Numbers.m) } | last() | when(finished())").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,sum - 1996-12-20T00:40:02.000000001,18446744073709551615,3650215962958587783,A,1996-12-20T00:40:02.000000000,34 - 1996-12-20T00:40:02.000000001,18446744073709551615,11753611437813598533,B,1996-12-20T00:39:58.000000000,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,2867199309159137213,B,1996-12-20T00:39:58.000000000,24 + 1996-12-20T00:40:02.000000001,18446744073709551615,12960666915911099378,A,1996-12-20T00:40:02.000000000,34 "###); } @@ -566,23 +566,23 @@ async fn test_tick_when_finished() { async fn test_count_sliding_tick_daily() { insta::assert_snapshot!(QueryFixture::new("{ count: count(Foo), sliding_count: count(Foo, window=sliding(2, daily())) }").run_to_csv(&data_fixture_over_days().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,count,sliding_count - 1996-12-19T20:39:57.000000000,9223372036854775808,3650215962958587783,A,1,1 - 1996-12-19T20:39:58.000000000,9223372036854775808,11753611437813598533,B,1,1 - 1996-12-20T00:00:00.000000000,18446744073709551615,3650215962958587783,A,1,1 - 1996-12-20T00:00:00.000000000,18446744073709551615,11753611437813598533,B,1,1 - 1996-12-21T00:00:00.000000000,18446744073709551615,3650215962958587783,A,1,1 - 1996-12-21T00:00:00.000000000,18446744073709551615,11753611437813598533,B,1,1 - 1996-12-21T00:32:59.000000000,9223372036854775808,3650215962958587783,A,2,1 - 1996-12-21T00:44:00.000000000,9223372036854775808,3650215962958587783,A,3,2 - 1996-12-21T00:45:01.000000000,9223372036854775808,3650215962958587783,A,4,3 - 1996-12-21T08:00:00.000000000,9223372036854775808,3650215962958587783,A,5,4 - 1996-12-22T00:00:00.000000000,18446744073709551615,3650215962958587783,A,5,4 - 1996-12-22T00:00:00.000000000,18446744073709551615,11753611437813598533,B,1,0 - 1996-12-22T00:25:02.000000000,9223372036854775808,11753611437813598533,B,2,1 - 1996-12-22T00:30:03.000000000,9223372036854775808,3650215962958587783,A,6,5 - 1996-12-23T00:00:00.000000000,18446744073709551615,3650215962958587783,A,6,5 - 1996-12-23T00:00:00.000000000,18446744073709551615,11753611437813598533,B,2,1 - 1996-12-23T00:40:04.000000000,9223372036854775808,3650215962958587783,A,7,2 + 1996-12-19T20:39:57.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-19T20:39:58.000000000,9223372036854775808,2867199309159137213,B,1,1 + 1996-12-20T00:00:00.000000000,18446744073709551615,2867199309159137213,B,1,1 + 1996-12-20T00:00:00.000000000,18446744073709551615,12960666915911099378,A,1,1 + 1996-12-21T00:00:00.000000000,18446744073709551615,2867199309159137213,B,1,1 + 1996-12-21T00:00:00.000000000,18446744073709551615,12960666915911099378,A,1,1 + 1996-12-21T00:32:59.000000000,9223372036854775808,12960666915911099378,A,2,1 + 1996-12-21T00:44:00.000000000,9223372036854775808,12960666915911099378,A,3,2 + 1996-12-21T00:45:01.000000000,9223372036854775808,12960666915911099378,A,4,3 + 1996-12-21T08:00:00.000000000,9223372036854775808,12960666915911099378,A,5,4 + 1996-12-22T00:00:00.000000000,18446744073709551615,2867199309159137213,B,1,0 + 1996-12-22T00:00:00.000000000,18446744073709551615,12960666915911099378,A,5,4 + 1996-12-22T00:25:02.000000000,9223372036854775808,2867199309159137213,B,2,1 + 1996-12-22T00:30:03.000000000,9223372036854775808,12960666915911099378,A,6,5 + 1996-12-23T00:00:00.000000000,18446744073709551615,2867199309159137213,B,2,1 + 
1996-12-23T00:00:00.000000000,18446744073709551615,12960666915911099378,A,6,5 + 1996-12-23T00:40:04.000000000,9223372036854775808,12960666915911099378,A,7,2 "###); } @@ -590,23 +590,23 @@ async fn test_count_sliding_tick_daily() { async fn test_count_daily_sliding_equivalent_to_since() { insta::assert_snapshot!(QueryFixture::new("{ since: count(Foo, window=since(daily())), sliding: count(Foo, window=sliding(1, daily())) }").run_to_csv(&data_fixture_over_days().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,since,sliding - 1996-12-19T20:39:57.000000000,9223372036854775808,3650215962958587783,A,1,1 - 1996-12-19T20:39:58.000000000,9223372036854775808,11753611437813598533,B,1,1 - 1996-12-20T00:00:00.000000000,18446744073709551615,3650215962958587783,A,1,1 - 1996-12-20T00:00:00.000000000,18446744073709551615,11753611437813598533,B,1,1 - 1996-12-21T00:00:00.000000000,18446744073709551615,3650215962958587783,A,0,0 - 1996-12-21T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0,0 - 1996-12-21T00:32:59.000000000,9223372036854775808,3650215962958587783,A,1,1 - 1996-12-21T00:44:00.000000000,9223372036854775808,3650215962958587783,A,2,2 - 1996-12-21T00:45:01.000000000,9223372036854775808,3650215962958587783,A,3,3 - 1996-12-21T08:00:00.000000000,9223372036854775808,3650215962958587783,A,4,4 - 1996-12-22T00:00:00.000000000,18446744073709551615,3650215962958587783,A,4,4 - 1996-12-22T00:00:00.000000000,18446744073709551615,11753611437813598533,B,0,0 - 1996-12-22T00:25:02.000000000,9223372036854775808,11753611437813598533,B,1,1 - 1996-12-22T00:30:03.000000000,9223372036854775808,3650215962958587783,A,1,1 - 1996-12-23T00:00:00.000000000,18446744073709551615,3650215962958587783,A,1,1 - 1996-12-23T00:00:00.000000000,18446744073709551615,11753611437813598533,B,1,1 - 1996-12-23T00:40:04.000000000,9223372036854775808,3650215962958587783,A,1,1 + 1996-12-19T20:39:57.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-19T20:39:58.000000000,9223372036854775808,2867199309159137213,B,1,1 + 1996-12-20T00:00:00.000000000,18446744073709551615,2867199309159137213,B,1,1 + 1996-12-20T00:00:00.000000000,18446744073709551615,12960666915911099378,A,1,1 + 1996-12-21T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0,0 + 1996-12-21T00:00:00.000000000,18446744073709551615,12960666915911099378,A,0,0 + 1996-12-21T00:32:59.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-21T00:44:00.000000000,9223372036854775808,12960666915911099378,A,2,2 + 1996-12-21T00:45:01.000000000,9223372036854775808,12960666915911099378,A,3,3 + 1996-12-21T08:00:00.000000000,9223372036854775808,12960666915911099378,A,4,4 + 1996-12-22T00:00:00.000000000,18446744073709551615,2867199309159137213,B,0,0 + 1996-12-22T00:00:00.000000000,18446744073709551615,12960666915911099378,A,4,4 + 1996-12-22T00:25:02.000000000,9223372036854775808,2867199309159137213,B,1,1 + 1996-12-22T00:30:03.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-23T00:00:00.000000000,18446744073709551615,2867199309159137213,B,1,1 + 1996-12-23T00:00:00.000000000,18446744073709551615,12960666915911099378,A,1,1 + 1996-12-23T00:40:04.000000000,9223372036854775808,12960666915911099378,A,1,1 "###); } @@ -614,23 +614,23 @@ async fn test_count_daily_sliding_equivalent_to_since() { async fn test_max_subsort_input_merges_correctly() { insta::assert_snapshot!(QueryFixture::new("{ field: count(Foo, window=since(daily())) | count(window=since(daily())) }").run_to_csv(&data_fixture_over_days().await).await.unwrap(), @r###" 
_time,_subsort,_key_hash,_key,field - 1996-12-19T20:39:57.000000000,9223372036854775808,3650215962958587783,A,1 - 1996-12-19T20:39:58.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-12-20T00:00:00.000000000,18446744073709551615,3650215962958587783,A,2 - 1996-12-20T00:00:00.000000000,18446744073709551615,11753611437813598533,B,2 - 1996-12-21T00:00:00.000000000,18446744073709551615,3650215962958587783,A,1 - 1996-12-21T00:00:00.000000000,18446744073709551615,11753611437813598533,B,1 - 1996-12-21T00:32:59.000000000,9223372036854775808,3650215962958587783,A,1 - 1996-12-21T00:44:00.000000000,9223372036854775808,3650215962958587783,A,2 - 1996-12-21T00:45:01.000000000,9223372036854775808,3650215962958587783,A,3 - 1996-12-21T08:00:00.000000000,9223372036854775808,3650215962958587783,A,4 - 1996-12-22T00:00:00.000000000,18446744073709551615,3650215962958587783,A,5 - 1996-12-22T00:00:00.000000000,18446744073709551615,11753611437813598533,B,1 - 1996-12-22T00:25:02.000000000,9223372036854775808,11753611437813598533,B,1 - 1996-12-22T00:30:03.000000000,9223372036854775808,3650215962958587783,A,1 - 1996-12-23T00:00:00.000000000,18446744073709551615,3650215962958587783,A,2 - 1996-12-23T00:00:00.000000000,18446744073709551615,11753611437813598533,B,2 - 1996-12-23T00:40:04.000000000,9223372036854775808,3650215962958587783,A,1 + 1996-12-19T20:39:57.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-19T20:39:58.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-12-20T00:00:00.000000000,18446744073709551615,2867199309159137213,B,2 + 1996-12-20T00:00:00.000000000,18446744073709551615,12960666915911099378,A,2 + 1996-12-21T00:00:00.000000000,18446744073709551615,2867199309159137213,B,1 + 1996-12-21T00:00:00.000000000,18446744073709551615,12960666915911099378,A,1 + 1996-12-21T00:32:59.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-21T00:44:00.000000000,9223372036854775808,12960666915911099378,A,2 + 1996-12-21T00:45:01.000000000,9223372036854775808,12960666915911099378,A,3 + 1996-12-21T08:00:00.000000000,9223372036854775808,12960666915911099378,A,4 + 1996-12-22T00:00:00.000000000,18446744073709551615,2867199309159137213,B,1 + 1996-12-22T00:00:00.000000000,18446744073709551615,12960666915911099378,A,5 + 1996-12-22T00:25:02.000000000,9223372036854775808,2867199309159137213,B,1 + 1996-12-22T00:30:03.000000000,9223372036854775808,12960666915911099378,A,1 + 1996-12-23T00:00:00.000000000,18446744073709551615,2867199309159137213,B,2 + 1996-12-23T00:00:00.000000000,18446744073709551615,12960666915911099378,A,2 + 1996-12-23T00:40:04.000000000,9223372036854775808,12960666915911099378,A,1 "###); } diff --git a/crates/sparrow-main/tests/e2e/time_tests.rs b/crates/sparrow-main/tests/e2e/time_tests.rs index cb88c8b42..f24234252 100644 --- a/crates/sparrow-main/tests/e2e/time_tests.rs +++ b/crates/sparrow-main/tests/e2e/time_tests.rs @@ -21,13 +21,13 @@ use crate::QueryFixture; async fn test_time_of_boolean() { insta::assert_snapshot!(QueryFixture::new("{ time_of: time_of(Booleans.a)}").run_to_csv(&boolean_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time_of - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:57.000000000 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:40:57.000000000 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:41:57.000000000 - 
1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:42:57.000000000 - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:43:57.000000000 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:44:57.000000000 - 1996-12-20T00:45:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:45:57.000000000 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:57.000000000 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:40:57.000000000 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:41:57.000000000 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:42:57.000000000 + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:43:57.000000000 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:44:57.000000000 + 1996-12-20T00:45:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:45:57.000000000 "###); } @@ -35,12 +35,12 @@ async fn test_time_of_boolean() { async fn test_time_of_i64() { insta::assert_snapshot!(QueryFixture::new("{ time_of: time_of(Numbers.m)}").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time_of - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:57.000000000 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:39:58.000000000 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:59.000000000 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:00.000000000 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:01.000000000 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:02.000000000 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:57.000000000 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:39:58.000000000 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:59.000000000 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:00.000000000 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:01.000000000 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:02.000000000 "###); } @@ -48,12 +48,12 @@ async fn test_time_of_i64() { async fn test_time_of_timestamp() { insta::assert_snapshot!(QueryFixture::new("{ time_of: time_of(Times.time) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time_of - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000 -
2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000 "###); } @@ -61,12 +61,12 @@ async fn test_time_of_timestamp() { async fn test_time_of_string() { insta::assert_snapshot!(QueryFixture::new("{ time_of: time_of(Strings.s)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time_of - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:57.000000000 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:40:57.000000000 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:41:57.000000000 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:42:57.000000000 - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:43:57.000000000 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:44:57.000000000 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:57.000000000 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:40:57.000000000 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:41:57.000000000 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:42:57.000000000 + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:43:57.000000000 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:44:57.000000000 "###); } @@ -74,12 +74,12 @@ async fn test_time_of_string() { async fn test_time_of_record() { insta::assert_snapshot!(QueryFixture::new("{ time_of: time_of(Strings)}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time_of - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:57.000000000 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:40:57.000000000 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:41:57.000000000 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:42:57.000000000 - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:43:57.000000000 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:44:57.000000000 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:57.000000000 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:40:57.000000000 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:41:57.000000000 + 
1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:42:57.000000000 + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:43:57.000000000 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:44:57.000000000 "###); } @@ -87,12 +87,12 @@ async fn test_time_of_record() { async fn test_time_of_record_as_i64() { insta::assert_snapshot!(QueryFixture::new("{ time_of: time_of(Strings) as i64}").run_to_csv(&strings_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time_of - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,851042397000000000 - 1996-12-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,851042457000000000 - 1996-12-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,851042517000000000 - 1996-12-20T00:42:57.000000000,9223372036854775808,11753611437813598533,B,851042577000000000 - 1996-12-20T00:43:57.000000000,9223372036854775808,11753611437813598533,B,851042637000000000 - 1996-12-20T00:44:57.000000000,9223372036854775808,11753611437813598533,B,851042697000000000 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,851042397000000000 + 1996-12-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,851042457000000000 + 1996-12-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,851042517000000000 + 1996-12-20T00:42:57.000000000,9223372036854775808,2867199309159137213,B,851042577000000000 + 1996-12-20T00:43:57.000000000,9223372036854775808,2867199309159137213,B,851042637000000000 + 1996-12-20T00:44:57.000000000,9223372036854775808,2867199309159137213,B,851042697000000000 "###); } @@ -100,12 +100,12 @@ async fn test_time_of_record_as_i64() { async fn test_day_of_month() { insta::assert_snapshot!(QueryFixture::new("{ day_of_month: day_of_month(Times.time) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,day_of_month - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,20 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,20 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,20 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,12 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,13 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,6 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,20 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,20 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,20 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,12 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,13 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,6 "###); } @@ -113,12 +113,12 @@ async fn test_day_of_month() { async fn test_day_of_month0() { insta::assert_snapshot!(QueryFixture::new("{ day_of_month0: day_of_month0(Times.time) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,day_of_month0 - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,19 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,19 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,19 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,11 -
1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,12 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,5 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,19 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,19 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,19 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,11 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,12 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,5 "###); } @@ -126,12 +126,12 @@ async fn test_day_of_month0() { async fn test_day_of_year() { insta::assert_snapshot!(QueryFixture::new("{ day_of_year: day_of_year(Times.time) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,day_of_year - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,354 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,293 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,233 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,346 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,347 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,341 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,354 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,293 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,233 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,346 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,347 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,341 "###); } @@ -139,12 +139,12 @@ async fn test_day_of_year() { async fn test_day_of_year0() { insta::assert_snapshot!(QueryFixture::new("{ day_of_year0: day_of_year0(Times.time) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,day_of_year0 - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,353 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,292 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,232 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,345 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,346 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,340 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,353 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,292 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,232 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,345 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,346 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,340 "###); } @@ -152,12 +152,12 @@ async fn test_day_of_year0() { async fn test_month_of_year() { insta::assert_snapshot!(QueryFixture::new("{ month_of_year: month_of_year(Times.time) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,month_of_year - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,12 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,10 -
1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,8 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,12 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,12 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,12 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,12 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,10 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,8 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,12 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,12 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,12 "###); } @@ -165,12 +165,12 @@ async fn test_month_of_year() { async fn test_month_of_year0() { insta::assert_snapshot!(QueryFixture::new("{ month_of_year0: month_of_year0(Times.time) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,month_of_year0 - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,11 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,9 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,7 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,11 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,11 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,11 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,11 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,9 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,7 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,11 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,11 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,11 "###); } @@ -178,12 +178,12 @@ async fn test_month_of_year0() { async fn test_year() { insta::assert_snapshot!(QueryFixture::new("{ year: year(Times.time) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,year - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004 "###); } @@ -191,12 +191,12 @@ async fn test_year() { async fn test_add_time_duration_s() { insta::assert_snapshot!(QueryFixture::new("{ add_time: Times.time | add_time(seconds(Times.n)) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,add_time -
1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:59.000000000 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:41:01.000000000 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:42:02.000000000 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:44:05.000000000 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:45:20.000000000 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:59.000000000 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:41:01.000000000 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:42:02.000000000 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:44:05.000000000 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:45:20.000000000 "###); } @@ -205,12 +205,12 @@ async fn test_add_time_duration_s_to_literal() { // This ensures that a string literal may be treated as a timestamp. insta::assert_snapshot!(QueryFixture::new("{ add_time: \"1994-12-20T00:39:59.000000000Z\" | add_time(seconds(Times.n)) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,add_time - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:40:01.000000000 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1994-12-20T00:40:03.000000000 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1994-12-20T00:40:04.000000000 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1994-12-20T00:40:07.000000000 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1994-12-20T00:40:22.000000000 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:40:01.000000000 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1994-12-20T00:40:03.000000000 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1994-12-20T00:40:04.000000000 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1994-12-20T00:40:07.000000000 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1994-12-20T00:40:22.000000000 "###); } @@ -218,12 +218,12 @@ async fn test_add_time_duration_s_to_literal() { async fn test_add_time_duration_s_literal() { insta::assert_snapshot!(QueryFixture::new("{ add_time: Times.time | add_time(seconds(10000)) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,add_time - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T03:26:37.000000000 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T03:27:37.000000000 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T03:28:37.000000000 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T03:29:37.000000000 -
1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T03:30:37.000000000 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T03:31:37.000000000 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T03:26:37.000000000 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T03:27:37.000000000 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T03:28:37.000000000 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T03:29:37.000000000 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T03:30:37.000000000 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T03:31:37.000000000 "###); } @@ -231,12 +231,12 @@ async fn test_add_time_duration_s_literal() { async fn test_add_time_interval_months() { insta::assert_snapshot!(QueryFixture::new("{ add_time: Times.time | add_time(months(Times.n)) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,add_time - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1995-02-20T00:39:57.000000000 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1996-02-20T00:40:57.000000000 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1997-01-20T00:41:57.000000000 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1999-08-13T00:43:57.000000000 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2006-11-06T00:44:57.000000000 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1995-02-20T00:39:57.000000000 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1996-02-20T00:40:57.000000000 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1997-01-20T00:41:57.000000000 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1999-08-13T00:43:57.000000000 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2006-11-06T00:44:57.000000000 "###); } @@ -244,12 +244,12 @@ async fn test_add_time_interval_months() { async fn test_add_time_interval_months_literal() { insta::assert_snapshot!(QueryFixture::new("{ add_time: Times.time | add_time(months(27)) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,add_time - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1997-03-20T00:39:57.000000000 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1998-01-20T00:40:57.000000000 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1998-11-20T00:41:57.000000000 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,2000-03-12T00:42:57.000000000 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,2001-03-13T00:43:57.000000000 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2007-03-06T00:44:57.000000000 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1997-03-20T00:39:57.000000000 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1998-01-20T00:40:57.000000000 +
1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1998-11-20T00:41:57.000000000 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,2000-03-12T00:42:57.000000000 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,2001-03-13T00:43:57.000000000 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2007-03-06T00:44:57.000000000 "###); } @@ -257,12 +257,12 @@ async fn test_add_time_interval_months_literal() { async fn test_add_time_interval_months_literal_negative() { insta::assert_snapshot!(QueryFixture::new("{ add_time: Times.time | add_time(months(-1)) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,add_time - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-11-20T00:39:57.000000000 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-09-20T00:40:57.000000000 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-07-20T00:41:57.000000000 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-11-12T00:42:57.000000000 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-11-13T00:43:57.000000000 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-11-06T00:44:57.000000000 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-11-20T00:39:57.000000000 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-09-20T00:40:57.000000000 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-07-20T00:41:57.000000000 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-11-12T00:42:57.000000000 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-11-13T00:43:57.000000000 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-11-06T00:44:57.000000000 "###); } @@ -270,12 +270,12 @@ async fn test_add_time_interval_months_literal_negative() { async fn test_add_time_interval_days() { insta::assert_snapshot!(QueryFixture::new("{ add_time: Times.time | add_time(days(Times.n)) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,add_time - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-22T00:39:57.000000000 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-24T00:40:57.000000000 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-25T00:41:57.000000000 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B, - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-21T00:43:57.000000000 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-29T00:44:57.000000000 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-22T00:39:57.000000000 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-24T00:40:57.000000000 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-25T00:41:57.000000000 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B, + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-21T00:43:57.000000000 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-29T00:44:57.000000000 "###); } @@ -283,12
+283,12 @@ async fn test_add_time_interval_days() { async fn test_add_time_interval_days_literal() { insta::assert_snapshot!(QueryFixture::new("{ add_time: Times.time | add_time(days(372)) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,add_time - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1995-12-27T00:39:57.000000000 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1996-10-26T00:40:57.000000000 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1997-08-27T00:41:57.000000000 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-19T00:42:57.000000000 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1999-12-20T00:43:57.000000000 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2005-12-13T00:44:57.000000000 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1995-12-27T00:39:57.000000000 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1996-10-26T00:40:57.000000000 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1997-08-27T00:41:57.000000000 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-19T00:42:57.000000000 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1999-12-20T00:43:57.000000000 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2005-12-13T00:44:57.000000000 "###); } @@ -299,12 +299,12 @@ async fn test_seconds_between() { let seconds_between = seconds_between(time, other_time) as i64 in { time, other_time, seconds_between }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,other_time,seconds_between - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,283996800 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-28857660 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,73612680 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-157075380 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-314409900 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,283996800 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-28857660 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,73612680 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-157075380 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-314409900 "###); } @@ -315,12 +315,12 @@ async fn
test_days_between() { let days_between = days_between(time, other_time) as i32 in { time, other_time, days_between }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,other_time,days_between - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,3287 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-334 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,851 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-1818 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-3639 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,3287 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-334 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,851 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-1818 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-3639 "###); // Tests that interval_days can cast to other types @@ -329,12 +329,12 @@ async fn test_days_between() { let days_between = days_between(time, other_time) as i64 in { time, other_time, days_between }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,other_time,days_between - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,3287 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-334 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,851 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-1818 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-3639 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,3287 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-334 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,851 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-1818 +
1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-3639 "###); insta::assert_snapshot!(QueryFixture::new("let time = Times.time @@ -342,12 +342,12 @@ async fn test_days_between() { let days_between = days_between(time, other_time) as f32 in { time, other_time, days_between }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,other_time,days_between - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,3287.0 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-334.0 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,851.0 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-1818.0 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-3639.0 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,3287.0 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-334.0 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,851.0 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-1818.0 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-3639.0 "###); } @@ -358,12 +358,12 @@ async fn test_months_between() { let months_between = months_between(time, other_time) as i32 in { time, other_time, months_between }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,other_time,months_between - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,108 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-11 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,28 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-60 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,, - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-120 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,2003-12-20T00:39:57.000000000,108 +
1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,1994-11-20T00:39:57.000000000,-11 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,1998-12-20T00:39:57.000000000,28 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,1992-12-20T00:39:57.000000000,-60 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,, + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,1994-12-20T00:39:57.000000000,-120 "###); } @@ -374,12 +374,12 @@ async fn test_seconds_between_literal() { let seconds_between = seconds_between(time, \"1994-12-20T00:41:57.000000000-08:00\") as i64 in { time, seconds_between }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,seconds_between - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,28920 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,-26236740 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,-52588800 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,-93974460 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,-125596920 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,-314380980 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,28920 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,-26236740 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,-52588800 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,-93974460 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,-125596920 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,-314380980 "###); } @@ -390,12 +390,12 @@ async fn test_days_between_literal() { let days_between = days_between(time, \"1994-12-20T00:41:57.000000000-08:00\") as i32 in { time, days_between }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,days_between - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,0 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,-303 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,-608 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,-1087 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,-1453 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,-3638 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,0 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,-303 +
1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,-608 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,-1087 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,-1453 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,-3638 "###); } @@ -406,12 +406,12 @@ async fn test_months_between_literal() { let months_between = months_between(time, \"1994-12-20T00:41:57.000000000-08:00\") as i32 in { time, months_between }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,months_between - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1994-12-20T00:39:57.000000000,0 - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,-10 - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,-20 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,-36 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,-48 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,2004-12-06T00:44:57.000000000,-120 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1994-12-20T00:39:57.000000000,0 + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,-10 + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,-20 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,-36 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,-48 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,2004-12-06T00:44:57.000000000,-120 "###); } @@ -441,12 +441,12 @@ async fn test_lag_0_i64() { async fn test_lag_1_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lag_one: lag(1, Numbers.m) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lag_one - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17,5 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,17 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,17 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,12 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17,5 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,,5 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,17 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,17 "###) } @@ -454,12 +454,12 @@ async fn test_lag_1_i64() { async fn test_lag_2_i64() { insta::assert_snapshot!(QueryFixture::new("{ m: Numbers.m, lag_two: Numbers.m | lag(2) }").run_to_csv(&i64_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,m,lag_two -
1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,5, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,24, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,17, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,,5 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12,5 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,17 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,5, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,24, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,17, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12,5 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,5 "###) } @@ -472,11 +472,11 @@ async fn test_mean_time_between() { in { prev, curr, elapsed, mean_elapsed: mean(elapsed) }").run_to_csv(&timestamp_ns_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,prev,curr,elapsed,mean_elapsed - 1994-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,,1994-12-20T00:39:57.000000000,, - 1995-10-20T00:40:57.000000000,9223372036854775808,11753611437813598533,B,,1995-10-20T00:40:57.000000000,, - 1996-08-20T00:41:57.000000000,9223372036854775808,11753611437813598533,B,1995-10-20T00:40:57.000000000,1996-08-20T00:41:57.000000000,26352060,26352060.0 - 1997-12-12T00:42:57.000000000,9223372036854775808,11753611437813598533,B,1996-08-20T00:41:57.000000000,1997-12-12T00:42:57.000000000,41385660,33868860.0 - 1998-12-13T00:43:57.000000000,9223372036854775808,11753611437813598533,B,1997-12-12T00:42:57.000000000,1998-12-13T00:43:57.000000000,31622460,33120060.0 - 2004-12-06T00:44:57.000000000,9223372036854775808,11753611437813598533,B,1998-12-13T00:43:57.000000000,2004-12-06T00:44:57.000000000,188784060,72036060.0 + 1994-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,,1994-12-20T00:39:57.000000000,, + 1995-10-20T00:40:57.000000000,9223372036854775808,2867199309159137213,B,,1995-10-20T00:40:57.000000000,, + 1996-08-20T00:41:57.000000000,9223372036854775808,2867199309159137213,B,1995-10-20T00:40:57.000000000,1996-08-20T00:41:57.000000000,26352060,26352060.0 + 1997-12-12T00:42:57.000000000,9223372036854775808,2867199309159137213,B,1996-08-20T00:41:57.000000000,1997-12-12T00:42:57.000000000,41385660,33868860.0 + 1998-12-13T00:43:57.000000000,9223372036854775808,2867199309159137213,B,1997-12-12T00:42:57.000000000,1998-12-13T00:43:57.000000000,31622460,33120060.0 + 2004-12-06T00:44:57.000000000,9223372036854775808,2867199309159137213,B,1998-12-13T00:43:57.000000000,2004-12-06T00:44:57.000000000,188784060,72036060.0 "###) } diff --git a/crates/sparrow-main/tests/e2e/when_tests.rs b/crates/sparrow-main/tests/e2e/when_tests.rs index c8a97b69e..4b8adf52f 100644 --- a/crates/sparrow-main/tests/e2e/when_tests.rs +++ b/crates/sparrow-main/tests/e2e/when_tests.rs @@ -103,9 +103,9 @@ async fn test_when_output_resets_to_null() { async fn test_boolean_when() { insta::assert_snapshot!(QueryFixture::new("{ when: WhenFixture.bool | when(WhenFixture.cond) }").run_to_csv(&when_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,when - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, -
1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A, + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A, "###); } @@ -115,7 +115,7 @@ async fn test_when_cond() { // rows in the last slice of the input. insta::assert_snapshot!(QueryFixture::new("WhenFixture | when(WhenFixture.i64 == 2)").run_to_csv(&when_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,cond,bool,i64,string - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello "###) } @@ -123,9 +123,9 @@ async fn test_when_cond() { async fn test_i64_when() { insta::assert_snapshot!(QueryFixture::new("{ when: WhenFixture.i64 | when(WhenFixture.cond) }").run_to_csv(&when_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,when - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,57 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,57 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,2 "###); } @@ -133,9 +133,9 @@ async fn test_i64_when() { async fn test_timestamp_when() { insta::assert_snapshot!(QueryFixture::new("{ when: WhenFixture.time | when(WhenFixture.cond) }").run_to_csv(&when_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,when - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:57.000000000 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:00.000000000 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:02.000000000 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:57.000000000 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:00.000000000 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:02.000000000 "###); } @@ -143,9 +143,9 @@ async fn test_timestamp_when() { async fn test_string_when() { insta::assert_snapshot!(QueryFixture::new("{ when: WhenFixture.string | when(WhenFixture.cond) }").run_to_csv(&when_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,when - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,hello - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,hello + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,hello + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,hello "###); } @@ -153,9 +153,9 @@ async fn test_string_when() { async fn test_record_when() { insta::assert_snapshot!(QueryFixture::new("WhenFixture | when($input.cond)").run_to_csv(&when_data_fixture().await).await.unwrap(), @r###" 
_time,_subsort,_key_hash,_key,time,subsort,key,cond,bool,i64,string - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:57.000000000,0,A,true,false,57,hello - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:00.000000000,0,A,true,,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:57.000000000,0,A,true,false,57,hello + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:00.000000000,0,A,true,,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello "###); } @@ -163,9 +163,9 @@ async fn test_record_when() { async fn test_record_when_chained() { insta::assert_snapshot!(QueryFixture::new("WhenFixture | when($input.cond) | when(WhenFixture.cond)").run_to_csv(&when_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,cond,bool,i64,string - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:57.000000000,0,A,true,false,57,hello - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:00.000000000,0,A,true,,, - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:57.000000000,0,A,true,false,57,hello + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:00.000000000,0,A,true,,, + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:02.000000000,0,A,true,,2,hello "###); } diff --git a/crates/sparrow-main/tests/e2e/windowed_aggregation_tests.rs b/crates/sparrow-main/tests/e2e/windowed_aggregation_tests.rs index 92373358a..67f621087 100644 --- a/crates/sparrow-main/tests/e2e/windowed_aggregation_tests.rs +++ b/crates/sparrow-main/tests/e2e/windowed_aggregation_tests.rs @@ -67,14 +67,14 @@ async fn window_data_fixture_with_nulls() -> DataFixture { async fn test_sliding_window_with_predicate() { insta::assert_snapshot!(QueryFixture::new("{ since: count(Foo, window=since(daily())), slide: Foo | count(window=sliding(2, $input | is_valid())) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,since,slide - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,1 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,1,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,2,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,3,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,4,2 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,5,2 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,6,2 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,7,2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,1,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,2,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,3,2 + 
1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,4,2 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,5,2 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,6,2 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,7,2 "###); } @@ -82,8 +82,8 @@ async fn test_sliding_window_with_predicate() { async fn test_sliding_window_with_predicate_final_results() { insta::assert_snapshot!(QueryFixture::new("{ since: count(Foo, window=since(daily())), slide: Foo | count(window=sliding(2, $input | is_valid())) }").with_final_results().run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,since,slide - 1996-12-20T00:40:04.000000001,18446744073709551615,3650215962958587783,A,7,2 - 1996-12-20T00:40:04.000000001,18446744073709551615,11753611437813598533,B,1,1 + 1996-12-20T00:40:04.000000001,18446744073709551615,2867199309159137213,B,1,1 + 1996-12-20T00:40:04.000000001,18446744073709551615,12960666915911099378,A,7,2 "###); } @@ -91,14 +91,14 @@ async fn test_sliding_window_with_predicate_final_results() { async fn test_count_since_window() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.n < 7.0, count: count(Foo.n), count_since: count(Foo.n, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,count,count_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,false,1,1 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,1,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2,true,2,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,false,3,1 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0,true,4,2 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0,false,5,1 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,,5,1 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,false,6,2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,false,1,1 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,1,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2,true,2,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,false,3,1 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0,true,4,2 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0,false,5,1 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,,5,1 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,false,6,2 "###); } @@ -106,14 +106,14 @@ async fn test_count_since_window() { async fn test_sum_since_window() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.n < 7.0, sum: sum(Foo.n), sum_since: sum(Foo.n, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,sum,sum_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,false,10.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,3.9,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2,true,16.2,16.2 - 
1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,false,25.45,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0,true,28.45,12.25 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0,false,36.45,8.0 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,,36.45,8.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,false,46.45,18.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,false,10.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,3.9,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2,true,16.2,16.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,false,25.45,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0,true,28.45,12.25 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0,false,36.45,8.0 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,,36.45,8.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,false,46.45,18.0 "###); } @@ -121,14 +121,14 @@ async fn test_sum_since_window() { async fn test_min_since_window() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.n < 7.0, min: min(Foo.n), min_since: min(Foo.n, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,min,min_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,false,10.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,3.9,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2,true,6.2,6.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,false,6.2,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0,true,3.0,3.0 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0,false,3.0,8.0 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,,3.0,8.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,false,3.0,8.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,false,10.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,3.9,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2,true,6.2,6.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,false,6.2,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0,true,3.0,3.0 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0,false,3.0,8.0 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,,3.0,8.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,false,3.0,8.0 "###); } @@ -136,14 +136,14 @@ async fn test_min_since_window() { async fn test_max_since_window() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.n < 7.0, max: max(Foo.n), max_since: max(Foo.n, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,max,max_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,false,10.0,10.0 - 
1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,3.9,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2,true,10.0,10.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,false,10.0,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0,true,10.0,9.25 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0,false,10.0,8.0 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,,10.0,8.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,false,10.0,10.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,false,10.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,3.9,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2,true,10.0,10.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,false,10.0,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0,true,10.0,9.25 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0,false,10.0,8.0 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,,10.0,8.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,false,10.0,10.0 "###); } @@ -151,14 +151,14 @@ async fn test_max_since_window() { async fn test_mean_since_window() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.n < 7.0, mean: mean(Foo.n), mean_since: mean(Foo.n, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,mean,mean_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,false,10.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,3.9,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2,true,8.1,8.1 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,false,8.483333333333333,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0,true,7.112499999999999,6.125 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0,false,7.289999999999999,8.0 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,,7.289999999999999,8.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,false,7.741666666666666,9.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,false,10.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,3.9,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2,true,8.1,8.1 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,false,8.483333333333333,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0,true,7.112499999999999,6.125 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0,false,7.289999999999999,8.0 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,,7.289999999999999,8.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,false,7.741666666666666,9.0 "###); } @@ -166,14 +166,14 @@ async fn test_mean_since_window() { async fn test_variance_since_window() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.n < 7.0, 
variance: variance(Foo.n), variance_since: variance(Foo.n, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,variance,variance_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,false,, - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,, - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2,true,3.609999999999999,3.609999999999999 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,false,2.7005555555555554, - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0,true,7.662968749999997,9.765625 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0,false,6.256399999999998, - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,,6.256399999999998, - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,false,6.233680555555555,1.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,false,, + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,, + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2,true,3.609999999999999,3.609999999999999 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,false,2.7005555555555554, + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0,true,7.662968749999997,9.765625 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0,false,6.256399999999998, + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,,6.256399999999998, + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,false,6.233680555555555,1.0 "###); } @@ -181,14 +181,14 @@ async fn test_variance_since_window() { async fn test_last_since_window() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.n < 7.0, last: last(Foo.n), last_since: last(Foo.n, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,last,last_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,false,10.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,3.9,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2,true,6.2,6.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,false,9.25,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0,true,3.0,3.0 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0,false,8.0,8.0 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,,8.0,8.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,false,10.0,10.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,false,10.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,3.9,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2,true,6.2,6.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,false,9.25,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0,true,3.0,3.0 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0,false,8.0,8.0 + 
1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,,8.0,8.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,false,10.0,10.0 "###); } @@ -196,14 +196,14 @@ async fn test_last_since_window() { async fn test_f64_first_since_window() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.n < 7.0, first: first(Foo.n), first_since: first(Foo.n, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,first,first_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,false,10.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,3.9,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2,true,10.0,10.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,false,10.0,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0,true,10.0,9.25 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0,false,10.0,8.0 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,,10.0,8.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,false,10.0,8.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,false,10.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,3.9,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2,true,10.0,10.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,false,10.0,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0,true,10.0,9.25 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0,false,10.0,8.0 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,,10.0,8.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,false,10.0,8.0 "###); } @@ -211,14 +211,14 @@ async fn test_f64_first_since_window() { async fn test_string_first_since_window() { insta::assert_snapshot!(QueryFixture::new("{ vegetable: Foo.vegetable, cond: Foo.n < 7.0, first: first(Foo.vegetable), first_since: first(Foo.vegetable, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,vegetable,cond,first,first_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,arugula,false,arugula,arugula - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,beet,true,beet,beet - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,carrot,true,arugula,arugula - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,dill,false,arugula,dill - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,edamame,true,arugula,dill - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,fennel,false,arugula,fennel - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,green beans,,arugula,fennel - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,habanero,false,arugula,fennel + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,arugula,false,arugula,arugula + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,beet,true,beet,beet + 
1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,carrot,true,arugula,arugula + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,dill,false,arugula,dill + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,edamame,true,arugula,dill + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,fennel,false,arugula,fennel + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,green beans,,arugula,fennel + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,habanero,false,arugula,fennel "###); } @@ -226,14 +226,14 @@ async fn test_string_first_since_window() { async fn test_string_last_since_window() { insta::assert_snapshot!(QueryFixture::new("{ vegetable: Foo.vegetable, cond: Foo.n < 7.0, last: last(Foo.vegetable), last_since: last(Foo.vegetable, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,vegetable,cond,last,last_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,arugula,false,arugula,arugula - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,beet,true,beet,beet - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,carrot,true,carrot,carrot - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,dill,false,dill,dill - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,edamame,true,edamame,edamame - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,fennel,false,fennel,fennel - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,green beans,,green beans,green beans - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,habanero,false,habanero,habanero + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,arugula,false,arugula,arugula + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,beet,true,beet,beet + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,carrot,true,carrot,carrot + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,dill,false,dill,dill + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,edamame,true,edamame,edamame + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,fennel,false,fennel,fennel + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,green beans,,green beans,green beans + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,habanero,false,habanero,habanero "###); } @@ -241,14 +241,14 @@ async fn test_string_last_since_window() { async fn test_bool_first_since_window() { insta::assert_snapshot!(QueryFixture::new("{ bool: Foo.bool, cond: Foo.n < 7.0, first: first(Foo.bool), first_since: first(Foo.bool, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,bool,cond,first,first_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,false,true,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,true,true,true,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,false,true,true,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,false,false,true,false - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,true,true,true,false - 
1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,false,false,true,false - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,true,,true,false - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,false,false,true,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,false,true,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,true,true,true,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,false,true,true,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,false,false,true,false + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,true,true,true,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,false,false,true,false + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,true,,true,false + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,false,false,true,false "###); } @@ -256,14 +256,14 @@ async fn test_bool_first_since_window() { async fn test_bool_last_since_window() { insta::assert_snapshot!(QueryFixture::new("{ bool: Foo.bool, cond: Foo.n < 7.0, last: last(Foo.bool), last_since: last(Foo.bool, window=since(Foo.n < 7.0)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,bool,cond,last,last_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,false,true,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,true,true,true,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,false,true,false,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,false,false,false,false - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,true,true,true,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,false,false,false,false - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,true,,true,true - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,false,false,false,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,false,true,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,true,true,true,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,false,true,false,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,false,false,false,false + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,true,true,true,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,false,false,false,false + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,true,,true,true + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,false,false,false,false "###); } @@ -273,14 +273,14 @@ async fn test_bool_last_since_window() { async fn test_first_since_window_emits_value_on_reset() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.bool, first_since: first(Foo.n, window=since(Foo.bool)) }").run_to_csv(&window_data_fixture_with_nulls().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,first_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,true,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,3.9 - 
1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,,false, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,,,9.25 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,false,9.25 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,1.0,true,9.25 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,true,10.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,true,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,,false, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,,,9.25 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,false,9.25 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,1.0,true,9.25 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,true,10.0 "###); } @@ -290,14 +290,14 @@ async fn test_first_since_window_emits_value_on_reset() { async fn test_first_sliding_window_emits_value_on_reset() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, first_sliding: first(Foo.n, window=sliding(2, is_valid(Foo))) }").run_to_csv(&window_data_fixture_with_nulls().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,first_sliding - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,,10.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,,9.25 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,1.0,1.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,1.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,,10.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,,9.25 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,1.0,1.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,1.0 "###); } @@ -307,14 +307,14 @@ async fn test_first_sliding_window_emits_value_on_reset() { async fn test_last_since_window_emits_value_on_reset() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, cond: Foo.bool, last_since: last(Foo.n, window=since(Foo.bool)) }").run_to_csv(&window_data_fixture_with_nulls().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,cond,last_since - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,true,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,true,3.9 - 
1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,,false, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,,,9.25 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,,false,9.25 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,1.0,true,1.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,true,10.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,true,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,true,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,,false, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,,,9.25 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,,false,9.25 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,1.0,true,1.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,true,10.0 "###); } @@ -324,14 +324,14 @@ async fn test_last_since_window_emits_value_on_reset() { async fn test_last_sliding_window_emits_value_on_reset() { insta::assert_snapshot!(QueryFixture::new("{ n: Foo.n, last_sliding: last(Foo.n, window=sliding(2, is_valid(Foo))) }").run_to_csv(&window_data_fixture_with_nulls().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,n,last_sliding - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,,10.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,,9.25 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,, - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,1.0,1.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0,10.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,,10.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,,9.25 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,, + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,1.0,1.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0,10.0 "###); } @@ -339,14 +339,14 @@ async fn test_last_sliding_window_emits_value_on_reset() { async fn test_count_sliding_window_every_few_events() { insta::assert_snapshot!(QueryFixture::new("{ cond: is_valid(Foo), total_count: count(Foo), sliding_count: count(Foo, window=sliding(3, is_valid(Foo))) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,cond,total_count,sliding_count - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,1,1 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,true,1,1 - 
1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,true,2,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,true,3,3 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,true,4,3 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,true,5,3 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,true,6,3 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,true,7,3 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,1,1 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,true,1,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,true,2,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,true,3,3 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,true,4,3 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,true,5,3 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,true,6,3 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,true,7,3 "###); } @@ -354,14 +354,14 @@ async fn test_count_sliding_window_every_few_events() { async fn test_count_sliding_window_with_condition() { insta::assert_snapshot!(QueryFixture::new("{ cond: Foo.n > 5, sliding_count: count(Foo.n, window=sliding(2, Foo.n > 5)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,cond,sliding_count - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true,1 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,false,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,true,2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,true,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,false,2 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,true,3 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,,2 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,true,3 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true,1 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,false,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,true,2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,true,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,false,2 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,true,3 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,,2 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,true,3 "###); } @@ -369,14 +369,14 @@ async fn test_count_sliding_window_with_condition() { async fn test_count_sliding_duration_1_equivalent_to_since() { insta::assert_snapshot!(QueryFixture::new("{ since: count(Foo.bool, window=since(Foo.n > 5)), sliding: count(Foo.bool, window=sliding(1, Foo.n > 5)) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,since,sliding - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,1,1 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,1,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,1,1 - 
1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,1,1 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,1,1 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,2,2 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,1,1 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,2,2 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,1,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,2,2 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,1,1 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,2,2 "###); } @@ -384,14 +384,14 @@ async fn test_count_sliding_duration_1_equivalent_to_since() { async fn test_sum_sliding_every_few_events() { insta::assert_snapshot!(QueryFixture::new("{ sliding_sum: sum(Foo.n, window=sliding(2, is_valid(Foo))) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sliding_sum - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,16.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,15.45 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,12.25 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,11.0 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,8.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,16.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,15.45 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,12.25 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,11.0 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,8.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0 "###); } @@ -399,14 +399,14 @@ async fn test_sum_sliding_every_few_events() { async fn test_first_f64_sliding_every_few_events() { insta::assert_snapshot!(QueryFixture::new("{ sliding_first: first(Foo.n, window=sliding(2, is_valid(Foo))) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sliding_first - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,10.0 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,6.2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,9.25 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,3.0 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,8.0 - 
1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,10.0 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,6.2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,9.25 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,3.0 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,8.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0 "###); } @@ -414,14 +414,14 @@ async fn test_first_f64_sliding_every_few_events() { async fn test_first_string_sliding_every_few_events() { insta::assert_snapshot!(QueryFixture::new("{ sliding_first: first(Foo.vegetable, window=sliding(2, is_valid(Foo))) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sliding_first - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,arugula - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,beet - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,arugula - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,carrot - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,dill - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,edamame - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,fennel - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,green beans + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,arugula + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,beet + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,arugula + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,carrot + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,dill + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,edamame + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,fennel + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,green beans "###); } @@ -429,14 +429,14 @@ async fn test_first_string_sliding_every_few_events() { async fn test_first_boolean_sliding_every_few_events() { insta::assert_snapshot!(QueryFixture::new("{ sliding_first: first(Foo.bool, window=sliding(2, is_valid(Foo))) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sliding_first - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,true - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,false - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,false - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,true - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,false - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,true + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,true + 
1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,true "###); } @@ -444,28 +444,28 @@ async fn test_first_boolean_sliding_every_few_events() { async fn test_last_f64_sliding_every_few_events() { insta::assert_snapshot!(QueryFixture::new("{ sliding_last: last(Foo.n, window=sliding(2, is_valid(Foo))) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sliding_last - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,10.0 - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,3.9 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,6.2 - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,9.25 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,3.0 - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,8.0 - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,8.0 - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,10.0 + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,10.0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,3.9 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,6.2 + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,9.25 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,3.0 + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,8.0 + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,8.0 + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,10.0 "###); } #[tokio::test] async fn test_last_string_sliding_every_few_events() { insta::assert_snapshot!(QueryFixture::new("{ sliding_last: last(Foo.vegetable, window=sliding(2, is_valid(Foo))) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sliding_last - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,arugula - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,beet - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,carrot - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,dill - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,edamame - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,fennel - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,green beans - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,habanero + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,arugula + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,beet + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,carrot + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,dill + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,edamame + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,fennel + 
1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,green beans + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,habanero "###); } @@ -473,14 +473,14 @@ async fn test_last_string_sliding_every_few_events() { async fn test_last_bool_sliding_every_few_events() { insta::assert_snapshot!(QueryFixture::new("{ sliding_last: last(Foo.bool, window=sliding(2, is_valid(Foo))) }").run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sliding_last - 1996-12-20T00:39:57.000000000,9223372036854775808,3650215962958587783,A,true - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,true - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,false - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,false - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,true - 1996-12-20T00:40:02.000000000,9223372036854775808,3650215962958587783,A,false - 1996-12-20T00:40:03.000000000,9223372036854775808,3650215962958587783,A,true - 1996-12-20T00:40:04.000000000,9223372036854775808,3650215962958587783,A,false + 1996-12-20T00:39:57.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,true + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:40:02.000000000,9223372036854775808,12960666915911099378,A,false + 1996-12-20T00:40:03.000000000,9223372036854775808,12960666915911099378,A,true + 1996-12-20T00:40:04.000000000,9223372036854775808,12960666915911099378,A,false "###); } @@ -596,8 +596,8 @@ async fn test_sliding_count_final_results() { // (19-20], (20-21], (21-22] insta::assert_snapshot!(QueryFixture::new("{ key: Numbers.key, m: Numbers.m, daily_count: count(Numbers, window=sliding(3, hourly()))}").with_final_results().run_to_csv(&data_fixture).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,key,m,daily_count - 1996-12-20T06:42:05.000000001,18446744073709551615,3650215962958587783,A,A,3,1 - 1996-12-20T06:42:05.000000001,18446744073709551615,11753611437813598533,B,,,0 + 1996-12-20T06:42:05.000000001,18446744073709551615,2867199309159137213,B,,,0 + 1996-12-20T06:42:05.000000001,18446744073709551615,12960666915911099378,A,A,3,1 "###); } @@ -605,7 +605,7 @@ async fn test_sliding_count_final_results() { async fn test_final_sliding_window_constant() { insta::assert_snapshot!(QueryFixture::new("{ sliding_const: Foo.n | sum(window = sliding(5, true)) }").with_final_results().run_to_csv(&window_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sliding_const - 1996-12-20T00:40:04.000000001,18446744073709551615,3650215962958587783,A,30.25 - 1996-12-20T00:40:04.000000001,18446744073709551615,11753611437813598533,B,3.9 + 1996-12-20T00:40:04.000000001,18446744073709551615,2867199309159137213,B,3.9 + 1996-12-20T00:40:04.000000001,18446744073709551615,12960666915911099378,A,30.25 "###); } diff --git a/crates/sparrow-main/tests/e2e/with_key_tests.rs b/crates/sparrow-main/tests/e2e/with_key_tests.rs index e00cd9144..47fd51710 100644 --- a/crates/sparrow-main/tests/e2e/with_key_tests.rs +++ b/crates/sparrow-main/tests/e2e/with_key_tests.rs @@ -47,12 +47,12 @@ pub(crate) async fn with_key_data_fixture() -> DataFixture { async fn test_with_key_i64_pipe() { 
insta::assert_snapshot!(QueryFixture::new("Table | with_key($input.foreign_key_i64)").run_to_csv(&with_key_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,foreign_key_i64,foreign_key_str,n - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,1996-12-20T00:39:57.000000000,0,A,0,B,0 - 1996-12-20T00:39:58.000000000,9223372036854775808,2359047937476779835,1,1996-12-20T00:39:58.000000000,0,B,1,A,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,1575016611515860288,2,1996-12-20T00:39:59.000000000,0,A,2,, - 1996-12-20T00:40:00.000000000,9223372036854775808,1575016611515860288,2,1996-12-20T00:40:00.000000000,0,A,2,C,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,2359047937476779835,1,1996-12-20T00:40:01.000000000,0,A,1,A,3 - 1996-12-20T00:40:02.000000000,9223372036854775808,14253486467890685049,0,1996-12-20T00:40:02.000000000,0,A,0,B,4 + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,1996-12-20T00:39:57.000000000,0,A,0,B,0 + 1996-12-20T00:39:58.000000000,9223372036854775808,18433805721903975440,1,1996-12-20T00:39:58.000000000,0,B,1,A,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,2694864431690786590,2,1996-12-20T00:39:59.000000000,0,A,2,, + 1996-12-20T00:40:00.000000000,9223372036854775808,2694864431690786590,2,1996-12-20T00:40:00.000000000,0,A,2,C,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,18433805721903975440,1,1996-12-20T00:40:01.000000000,0,A,1,A,3 + 1996-12-20T00:40:02.000000000,9223372036854775808,11832085162654999889,0,1996-12-20T00:40:02.000000000,0,A,0,B,4 "###); } @@ -60,10 +60,10 @@ async fn test_with_key_i64_pipe() { async fn test_with_key_lookup_select() { insta::assert_snapshot!(QueryFixture::new("Table | with_key($input.foreign_key_i64) | last() | lookup(Table.foreign_key_i64) | when($input.foreign_key_i64 > 0)").run_to_csv(&with_key_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,foreign_key_i64,foreign_key_str,n - 1996-12-20T00:39:58.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:39:58.000000000,0,B,1,A,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:59.000000000,0,A,2,, - 1996-12-20T00:40:00.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:00.000000000,0,A,2,C,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:01.000000000,0,A,1,A,3 + 1996-12-20T00:39:58.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:39:58.000000000,0,B,1,A,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:59.000000000,0,A,2,, + 1996-12-20T00:40:00.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:00.000000000,0,A,2,C,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:01.000000000,0,A,1,A,3 "###); } @@ -71,12 +71,12 @@ async fn test_with_key_lookup_select() { async fn test_with_key_i64() { insta::assert_snapshot!(QueryFixture::new("with_key(Table.foreign_key_i64, Table)").run_to_csv(&with_key_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,foreign_key_i64,foreign_key_str,n - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,1996-12-20T00:39:57.000000000,0,A,0,B,0 - 1996-12-20T00:39:58.000000000,9223372036854775808,2359047937476779835,1,1996-12-20T00:39:58.000000000,0,B,1,A,1 - 
1996-12-20T00:39:59.000000000,9223372036854775808,1575016611515860288,2,1996-12-20T00:39:59.000000000,0,A,2,, - 1996-12-20T00:40:00.000000000,9223372036854775808,1575016611515860288,2,1996-12-20T00:40:00.000000000,0,A,2,C,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,2359047937476779835,1,1996-12-20T00:40:01.000000000,0,A,1,A,3 - 1996-12-20T00:40:02.000000000,9223372036854775808,14253486467890685049,0,1996-12-20T00:40:02.000000000,0,A,0,B,4 + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,1996-12-20T00:39:57.000000000,0,A,0,B,0 + 1996-12-20T00:39:58.000000000,9223372036854775808,18433805721903975440,1,1996-12-20T00:39:58.000000000,0,B,1,A,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,2694864431690786590,2,1996-12-20T00:39:59.000000000,0,A,2,, + 1996-12-20T00:40:00.000000000,9223372036854775808,2694864431690786590,2,1996-12-20T00:40:00.000000000,0,A,2,C,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,18433805721903975440,1,1996-12-20T00:40:01.000000000,0,A,1,A,3 + 1996-12-20T00:40:02.000000000,9223372036854775808,11832085162654999889,0,1996-12-20T00:40:02.000000000,0,A,0,B,4 "###); } @@ -89,12 +89,12 @@ async fn test_with_key_aggregate_select() { "{ sum: Table.n | when(Table.key == 'A') | sum() | with_key(Table.foreign_key_i64) }" ).run_to_csv(&with_key_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,sum - 1996-12-20T00:39:57.000000000,9223372036854775808,14253486467890685049,0,0 - 1996-12-20T00:39:58.000000000,9223372036854775808,2359047937476779835,1, - 1996-12-20T00:39:59.000000000,9223372036854775808,1575016611515860288,2,0 - 1996-12-20T00:40:00.000000000,9223372036854775808,1575016611515860288,2,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,2359047937476779835,1,5 - 1996-12-20T00:40:02.000000000,9223372036854775808,14253486467890685049,0,9 + 1996-12-20T00:39:57.000000000,9223372036854775808,11832085162654999889,0,0 + 1996-12-20T00:39:58.000000000,9223372036854775808,18433805721903975440,1, + 1996-12-20T00:39:59.000000000,9223372036854775808,2694864431690786590,2,0 + 1996-12-20T00:40:00.000000000,9223372036854775808,2694864431690786590,2,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,18433805721903975440,1,5 + 1996-12-20T00:40:02.000000000,9223372036854775808,11832085162654999889,0,9 "###) } @@ -106,7 +106,7 @@ async fn test_with_key_i64_parquet_output() { .run_to_parquet_hash(&with_key_data_fixture().await) .await .unwrap(), - @"CBDFADE9439A36B3400C3C3E4539F6045A3E59EE7B7B09A23DBD6AD3" + @"ECDEFE2C9B5BC4F19875EBE6D81EE5573500E9984CC42C91CD17E6FF" ) } @@ -114,12 +114,12 @@ async fn test_with_key_i64_parquet_output() { async fn test_with_computed_key_i64() { insta::assert_snapshot!(QueryFixture::new("with_key(Table.foreign_key_i64 + 1, Table)").run_to_csv(&with_key_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,foreign_key_i64,foreign_key_str,n - 1996-12-20T00:39:57.000000000,9223372036854775808,2359047937476779835,1,1996-12-20T00:39:57.000000000,0,A,0,B,0 - 1996-12-20T00:39:58.000000000,9223372036854775808,1575016611515860288,2,1996-12-20T00:39:58.000000000,0,B,1,A,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,14956259290599888306,3,1996-12-20T00:39:59.000000000,0,A,2,, - 1996-12-20T00:40:00.000000000,9223372036854775808,14956259290599888306,3,1996-12-20T00:40:00.000000000,0,A,2,C,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,1575016611515860288,2,1996-12-20T00:40:01.000000000,0,A,1,A,3 - 
1996-12-20T00:40:02.000000000,9223372036854775808,2359047937476779835,1,1996-12-20T00:40:02.000000000,0,A,0,B,4 + 1996-12-20T00:39:57.000000000,9223372036854775808,18433805721903975440,1,1996-12-20T00:39:57.000000000,0,A,0,B,0 + 1996-12-20T00:39:58.000000000,9223372036854775808,2694864431690786590,2,1996-12-20T00:39:58.000000000,0,B,1,A,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,5496774745203840792,3,1996-12-20T00:39:59.000000000,0,A,2,, + 1996-12-20T00:40:00.000000000,9223372036854775808,5496774745203840792,3,1996-12-20T00:40:00.000000000,0,A,2,C,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,2694864431690786590,2,1996-12-20T00:40:01.000000000,0,A,1,A,3 + 1996-12-20T00:40:02.000000000,9223372036854775808,18433805721903975440,1,1996-12-20T00:40:02.000000000,0,A,0,B,4 "###); } @@ -127,37 +127,26 @@ async fn test_with_computed_key_i64() { async fn test_with_computed_key_str() { insta::assert_snapshot!(QueryFixture::new("with_key(Table.foreign_key_str, Table)").run_to_csv(&with_key_data_fixture().await).await.unwrap(), @r###" _time,_subsort,_key_hash,_key,time,subsort,key,foreign_key_i64,foreign_key_str,n - 1996-12-20T00:39:57.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:39:57.000000000,0,A,0,B,0 - 1996-12-20T00:39:58.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:39:58.000000000,0,B,1,A,1 - 1996-12-20T00:39:59.000000000,9223372036854775808,8429509363638065888,,1996-12-20T00:39:59.000000000,0,A,2,, - 1996-12-20T00:40:00.000000000,9223372036854775808,9192031977313001967,C,1996-12-20T00:40:00.000000000,0,A,2,C,2 - 1996-12-20T00:40:01.000000000,9223372036854775808,3650215962958587783,A,1996-12-20T00:40:01.000000000,0,A,1,A,3 - 1996-12-20T00:40:02.000000000,9223372036854775808,11753611437813598533,B,1996-12-20T00:40:02.000000000,0,A,0,B,4 + 1996-12-20T00:39:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:39:57.000000000,0,A,0,B,0 + 1996-12-20T00:39:58.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:58.000000000,0,B,1,A,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,5663277146615294718,,1996-12-20T00:39:59.000000000,0,A,2,, + 1996-12-20T00:40:00.000000000,9223372036854775808,2521269998124177631,C,1996-12-20T00:40:00.000000000,0,A,2,C,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:01.000000000,0,A,1,A,3 + 1996-12-20T00:40:02.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:40:02.000000000,0,A,0,B,4 "###); } +#[ignore = "https://github.com/kaskada-ai/kaskada/issues/644"] #[tokio::test] -async fn test_with_key_unsupported_type() { - insta::assert_yaml_snapshot!(QueryFixture::new("with_key({k: Table.foreign_key_str}, Table)").run_to_csv(&with_key_data_fixture().await).await.unwrap_err(), @r###" - --- - code: Client specified an invalid argument - message: 1 errors in Fenl statements; see diagnostics - fenl_diagnostics: - - severity: error - code: E0010 - message: Invalid argument type(s) - formatted: - - "error[E0010]: Invalid argument type(s)" - - " --> Query:1:1" - - " |" - - "1 | with_key({k: Table.foreign_key_str}, Table)" - - " | ^^^^^^^^ -------------------------- Type: {k: string}" - - " | | " - - " | Invalid types for call to 'with_key'" - - " |" - - " = Expected 'key'" - - "" - - "" +async fn test_with_computed_key_str_last_no_simplification() { + insta::assert_snapshot!(QueryFixture::new("with_key(Table.foreign_key_str, Table) | last()").without_simplification().run_to_csv(&with_key_data_fixture().await).await.unwrap(), 
@r###" + _time,_subsort,_key_hash,_key,time,subsort,key,foreign_key_i64,foreign_key_str,n + 1996-12-20T00:39:57.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:39:57.000000000,0,A,0,B,0 + 1996-12-20T00:39:58.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:39:58.000000000,0,B,1,A,1 + 1996-12-20T00:39:59.000000000,9223372036854775808,5663277146615294718,,1996-12-20T00:39:59.000000000,0,A,2,, + 1996-12-20T00:40:00.000000000,9223372036854775808,2521269998124177631,C,1996-12-20T00:40:00.000000000,0,A,2,C,2 + 1996-12-20T00:40:01.000000000,9223372036854775808,12960666915911099378,A,1996-12-20T00:40:01.000000000,0,A,1,A,3 + 1996-12-20T00:40:02.000000000,9223372036854775808,2867199309159137213,B,1996-12-20T00:40:02.000000000,0,A,0,B,4 "###); } diff --git a/crates/sparrow-materialize/src/materialize/materialization.rs b/crates/sparrow-materialize/src/materialize/materialization.rs index ce3240226..f1c3a72d2 100644 --- a/crates/sparrow-materialize/src/materialize/materialization.rs +++ b/crates/sparrow-materialize/src/materialize/materialization.rs @@ -1,5 +1,6 @@ use error_stack::ResultExt; -use sparrow_api::kaskada::v1alpha::{ComputePlan, ComputeTable, Destination, ExecuteResponse}; +use sparrow_api::kaskada::v1alpha::{ComputePlan, ComputeTable, ExecuteResponse}; +use sparrow_runtime::execute::output::Destination; use tokio_stream::Stream; use crate::Error; diff --git a/crates/sparrow-merge/Cargo.toml b/crates/sparrow-merge/Cargo.toml new file mode 100644 index 000000000..71ba75939 --- /dev/null +++ b/crates/sparrow-merge/Cargo.toml @@ -0,0 +1,41 @@ +[package] +name = "sparrow-merge" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +publish = false +description = """ +Implementation of merge kernels and related pipelines. +""" + +[features] +testing = ["arrow-csv", "proptest"] + +[dependencies] +anyhow.workspace = true +arrow-arith.workspace = true +arrow-array.workspace = true +arrow-csv = { workspace = true, optional = true } +arrow-schema.workspace = true +arrow-select.workspace = true +async-stream.workspace = true +bit-set.workspace = true +derive_more.workspace = true +error-stack.workspace = true +futures.workspace = true +itertools.workspace = true +proptest = { workspace = true, optional = true } +smallvec.workspace = true +sparrow-arrow = { path = "../sparrow-arrow" } +sparrow-core = { path = "../sparrow-core" } +tokio.workspace = true +tracing.workspace = true + +[dev-dependencies] +arrow-csv.workspace = true +arrow-ord.workspace = true +proptest.workspace = true + +[lib] +doctest = false diff --git a/crates/sparrow-merge/src/in_memory_batches.rs b/crates/sparrow-merge/src/in_memory_batches.rs new file mode 100644 index 000000000..d58e12f39 --- /dev/null +++ b/crates/sparrow-merge/src/in_memory_batches.rs @@ -0,0 +1,116 @@ +use std::sync::RwLock; + +use arrow_array::RecordBatch; +use arrow_schema::SchemaRef; +use error_stack::{IntoReport, IntoReportCompat, ResultExt}; +use futures::Stream; + +use crate::old::homogeneous_merge; + +#[derive(derive_more::Display, Debug)] +pub enum Error { + #[display(fmt = "failed to add in-memory batch")] + Add, + #[display(fmt = "receiver lagged")] + ReceiverLagged, +} + +impl error_stack::Context for Error {} + +/// Struct for managing in-memory batches. 
+#[derive(Debug)]
+pub struct InMemoryBatches {
+    pub schema: SchemaRef,
+    current: RwLock<(usize, RecordBatch)>,
+    updates: tokio::sync::broadcast::Sender<(usize, RecordBatch)>,
+    /// A subscriber that is never used -- it exists only to keep the sender
+    /// alive.
+    _subscriber: tokio::sync::broadcast::Receiver<(usize, RecordBatch)>,
+}
+
+impl InMemoryBatches {
+    pub fn new(schema: SchemaRef) -> Self {
+        let (updates, _subscriber) = tokio::sync::broadcast::channel(10);
+        let merged = RecordBatch::new_empty(schema.clone());
+        Self {
+            schema,
+            current: RwLock::new((0, merged)),
+            updates,
+            _subscriber,
+        }
+    }
+
+    /// Add a batch, merging it into the in-memory version.
+    ///
+    /// Publishes the new batch to the subscribers.
+    pub fn add_batch(&self, batch: RecordBatch) -> error_stack::Result<(), Error> {
+        if batch.num_rows() == 0 {
+            return Ok(());
+        }
+
+        let new_version = {
+            let mut write = self.current.write().map_err(|_| Error::Add)?;
+            let (version, old) = &*write;
+            let version = *version;
+
+            let merged = if old.num_rows() == 0 {
+                batch.clone()
+            } else {
+                homogeneous_merge(&self.schema, vec![old.clone(), batch.clone()])
+                    .into_report()
+                    .change_context(Error::Add)?
+            };
+
+            *write = (version + 1, merged);
+            version + 1
+        };
+
+        self.updates
+            .send((new_version, batch))
+            .into_report()
+            .change_context(Error::Add)?;
+        Ok(())
+    }
+
+    /// Create a stream subscribed to the batches.
+    ///
+    /// The first batch will be the in-memory merged batch, and batches will be
+    /// added as they arrive.
+    pub fn subscribe(
+        &self,
+    ) -> impl Stream<Item = error_stack::Result<RecordBatch, Error>> + 'static {
+        let (mut version, merged) = self.current.read().unwrap().clone();
+        let mut recv = self.updates.subscribe();
+
+        async_stream::try_stream! {
+            tracing::info!("Starting subscriber with version {version}");
+            yield merged;
+
+            loop {
+                match recv.recv().await {
+                    Ok((recv_version, batch)) => {
+                        if version < recv_version {
+                            tracing::info!("Received version {recv_version}");
+                            yield batch;
+                            version = recv_version;
+                        } else {
+                            tracing::warn!("Ignoring old version {recv_version}");
+                        }
+                    }
+                    Err(tokio::sync::broadcast::error::RecvError::Closed) => {
+                        tracing::info!("Sender closed.");
+                        break;
+                    },
+                    Err(tokio::sync::broadcast::error::RecvError::Lagged(_)) => {
+                        Err(Error::ReceiverLagged)?;
+                    }
+                }
+            }
+        }
+    }
+
+    /// Retrieve the current in-memory batch.
+    pub fn current(&self) -> RecordBatch {
+        self.current.read().unwrap().1.clone()
+    }
+}
diff --git a/crates/sparrow-plan/src/lib.rs b/crates/sparrow-merge/src/lib.rs
similarity index 56%
rename from crates/sparrow-plan/src/lib.rs
rename to crates/sparrow-merge/src/lib.rs
index 051ffabf0..b13f73871 100644
--- a/crates/sparrow-plan/src/lib.rs
+++ b/crates/sparrow-merge/src/lib.rs
@@ -1,4 +1,4 @@
-//! Serializable representation of Sparrow computation plans and helpers.
+//! Merge-related functions and pipelines.
 #![warn(
     rust_2018_idioms,
     nonstandard_style,
     future_incompatible,
@@ -9,10 +9,7 @@
     clippy::undocumented_unsafe_blocks
 )]
 
-pub use ids::*;
-pub use inst::*;
-pub use value::*;
+mod in_memory_batches;
+pub mod old;
 
-mod ids;
-mod inst;
-mod value;
+pub use in_memory_batches::*;
diff --git a/crates/sparrow-merge/src/old.rs b/crates/sparrow-merge/src/old.rs
new file mode 100644
index 000000000..72edcab3e
--- /dev/null
+++ b/crates/sparrow-merge/src/old.rs
@@ -0,0 +1,12 @@
+mod binary_merge;
+mod gatherer;
+mod homogeneous_merge;
+mod input;
+
+#[cfg(any(test, feature = "testing"))]
+pub mod testing;
+
+pub use binary_merge::{binary_merge, BinaryMergeInput};
+pub use gatherer::*;
+pub use homogeneous_merge::*;
+pub use input::*;
diff --git a/crates/sparrow-runtime/src/merge/binary_merge.rs b/crates/sparrow-merge/src/old/binary_merge.rs
similarity index 97%
rename from crates/sparrow-runtime/src/merge/binary_merge.rs
rename to crates/sparrow-merge/src/old/binary_merge.rs
index 34ec97a1e..96788243e 100644
--- a/crates/sparrow-runtime/src/merge/binary_merge.rs
+++ b/crates/sparrow-merge/src/old/binary_merge.rs
@@ -7,10 +7,9 @@
 use std::cmp::Ordering;
 
-use arrow::array::{
-    ArrayRef, TimestampNanosecondArray, TimestampNanosecondBuilder, UInt64Array, UInt64Builder,
-};
-use arrow::error::ArrowError;
+use arrow_array::builder::{TimestampNanosecondBuilder, UInt64Builder};
+use arrow_array::{ArrayRef, TimestampNanosecondArray, UInt64Array};
+use arrow_schema::ArrowError;
 
 /// Struct describing one of the two inputs to the [binary_merge]
 /// function.
@@ -58,7 +57,7 @@ impl<'a> BinaryMergeInput<'a> {
     /// Creates a [MergeInput] from the record batch.
     ///
     /// This assumes the key columns are present as the first 3 columns.
-    pub fn from_batch(batch: &'a arrow::record_batch::RecordBatch) -> anyhow::Result<Self> {
+    pub fn from_batch(batch: &'a arrow_array::RecordBatch) -> anyhow::Result<Self> {
         use sparrow_core::TableSchema;
 
         anyhow::ensure!(
@@ -256,19 +255,19 @@
 #[derive(Debug, PartialEq)]
 pub struct BinaryMergeResult {
     /// The merged time column.
-    pub(crate) time: TimestampNanosecondArray,
+    pub time: TimestampNanosecondArray,
     /// The merged subsort column.
-    pub(crate) subsort: UInt64Array,
+    pub subsort: UInt64Array,
     /// The merged key_hash column.
-    pub(crate) key_hash: UInt64Array,
+    pub key_hash: UInt64Array,
     /// A column of (nullable) integers which may be used with
     /// [arrow::compute::take] to convert from an array of values associated
     /// with the keys of `a` to the merged result.
-    pub(crate) take_a: UInt64Array,
+    pub take_a: UInt64Array,
     /// A column of (nullable) integers which may be used with
     /// [arrow::compute::take] to convert from an array of values associated
     /// with the keys of `b` to the merged result.
-    pub(crate) take_b: UInt64Array,
+    pub take_b: UInt64Array,
 }
 
 /// A key to be merged. Ordering is lexicographic on `(time, subsort,
@@ -536,12 +535,12 @@ fn append_values(
 #[cfg(test)]
 mod tests {
-    use arrow::array::{Array, TimestampNanosecondArray, UInt64Array};
-    use arrow::compute::FilterBuilder;
+    use arrow_array::{Array, TimestampNanosecondArray, UInt64Array};
+    use arrow_select::filter::FilterBuilder;
     use proptest::prelude::*;
 
     use super::*;
-    use crate::merge::testing::arb_key_triples;
+    use crate::old::testing::arb_key_triples;
 
     #[test]
     fn run_length_lt_test() {
@@ -617,7 +616,7 @@ mod tests {
         result: &'a BinaryMergeResult,
         take_indices: &'a UInt64Array,
     ) -> (ArrayRef, ArrayRef, ArrayRef) {
-        use arrow::compute::is_not_null;
+        use arrow_arith::boolean::is_not_null;
 
         let is_valid = is_not_null(take_indices).unwrap();
         let filter = FilterBuilder::new(&is_valid).optimize().build();
diff --git a/crates/sparrow-runtime/src/merge/gatherer.rs b/crates/sparrow-merge/src/old/gatherer.rs
similarity index 98%
rename from crates/sparrow-runtime/src/merge/gatherer.rs
rename to crates/sparrow-merge/src/old/gatherer.rs
index e997eca77..753f92689 100644
--- a/crates/sparrow-runtime/src/merge/gatherer.rs
+++ b/crates/sparrow-merge/src/old/gatherer.rs
@@ -12,11 +12,11 @@
 use anyhow::Context;
 use bit_set::BitSet;
 use itertools::Itertools;
 
-use crate::merge::input::{InputItem, OrderedInputs};
+use crate::old::input::{InputItem, OrderedInputs};
 
 /// Gathered batches to be merged.
 #[derive(Debug, PartialEq, Eq)]
-pub(crate) struct GatheredBatches<T> {
+pub struct GatheredBatches<T> {
     /// For each participating stream, the sequence of gathered input batches.
     ///
     /// This may be multiple batches for each input stream -- for instance, the
@@ -30,17 +30,17 @@
     /// This uses a small vector to minimize indirections. We generally
     /// anticipate this being small, so 4 slots seems reasonable (although
     /// somewhat arbitrary).
-    pub(crate) batches: Vec<SmallVec<[T; 4]>>,
+    pub batches: Vec<SmallVec<[T; 4]>>,
 
     /// The minimum time (possibly) included in the gathered batches.
-    pub(crate) min_time_inclusive: i64,
+    pub min_time_inclusive: i64,
 
     /// The maximum time (possibly) included in the gathered batches.
-    pub(crate) max_time_inclusive: i64,
+    pub max_time_inclusive: i64,
 }
 
 #[derive(Debug)]
-pub(crate) struct Gatherer<T: InputItem> {
+pub struct Gatherer<T: InputItem> {
     remaining_sources: usize,
 
     /// The last input was emitted up to (but excluding) the given time.
     pub last_output_time: Option<i64>,
diff --git a/crates/sparrow-runtime/src/merge/homogeneous_merge.rs b/crates/sparrow-merge/src/old/homogeneous_merge.rs
similarity index 81%
rename from crates/sparrow-runtime/src/merge/homogeneous_merge.rs
rename to crates/sparrow-merge/src/old/homogeneous_merge.rs
index e94da6c33..93168e80e 100644
--- a/crates/sparrow-runtime/src/merge/homogeneous_merge.rs
+++ b/crates/sparrow-merge/src/old/homogeneous_merge.rs
@@ -1,22 +1,20 @@
+use std::collections::BinaryHeap;
 use std::sync::Arc;
 
 use anyhow::Context;
-use arrow::array::ArrayRef;
-use arrow::datatypes::SchemaRef;
-use arrow::record_batch::RecordBatch;
+use arrow_array::{ArrayRef, RecordBatch};
+use arrow_schema::SchemaRef;
 use itertools::izip;
 use sparrow_arrow::downcast::downcast_primitive_array;
 
-use crate::merge::binary_merge::BinaryMergeResult;
-use crate::merge::{binary_merge, BinaryMergeInput};
-use crate::min_heap::{HasPriority, MinHeap};
+use crate::old::binary_merge::BinaryMergeResult;
+use crate::old::{binary_merge, BinaryMergeInput};
 
 /// Merges 0 or more batches with the same schema into a single result.
 ///
 /// This applies an iterative 2-way merge strategy, concatenating the
 /// batches in increasing order of size.
-#[allow(dead_code)]
-pub(crate) fn homogeneous_merge(
+pub fn homogeneous_merge(
     schema: &SchemaRef,
     batches: impl IntoIterator<Item = RecordBatch>,
 ) -> anyhow::Result<RecordBatch> {
@@ -34,7 +32,7 @@ pub(crate) fn homogeneous_merge(
             to_merge.push(PendingMerge(batch))
         }
     }
-    let mut to_merge: MinHeap<_> = MinHeap::from(to_merge);
+    let mut to_merge: BinaryHeap<_> = BinaryHeap::from(to_merge);
 
     // Do the actual merge -- if there are no non-empty batches we're done.
     if to_merge.is_empty() {
         return Ok(RecordBatch::new_empty(schema.clone()));
@@ -79,16 +77,16 @@ fn do_merge(schema: &SchemaRef, a: PendingMerge, b: PendingMerge) -> anyhow::Res
         // We could go further -- we could build up a vector of which input each
         // row should be taken from, and then use that. This would allow us to
         // defer the intermediate merges (and thus intermediate allocations).
-        let a = arrow::compute::take(a.as_ref(), &take_a, None)?;
-        let b = arrow::compute::take(b.as_ref(), &take_b, None)?;
+        let a = arrow_select::take::take(a.as_ref(), &take_a, None)?;
+        let b = arrow_select::take::take(b.as_ref(), &take_b, None)?;
 
         // TODO: As implemented, this will prefer items from `a`. Since we merge ordered
         // by size, this is potentially non-deterministic if two files have
         // duplicate rows and the same length. We should figure out if we should
         // (a) fail (and do so) or (b) allow indicating which side to prefer.
         // The simplest would be for one file to be "newer".
-        let a_is_valid = arrow::compute::is_not_null(a.as_ref())?;
-        let merged = arrow::compute::kernels::zip::zip(&a_is_valid, a.as_ref(), b.as_ref())?;
+        let a_is_valid = arrow_arith::boolean::is_not_null(a.as_ref())?;
+        let merged = arrow_select::zip::zip(&a_is_valid, a.as_ref(), b.as_ref())?;
 
         columns.push(merged);
     }
@@ -110,24 +108,36 @@ impl PendingMerge {
     }
 }
 
-impl HasPriority for PendingMerge {
-    type Priority = usize;
+impl std::cmp::PartialEq for PendingMerge {
+    fn eq(&self, other: &Self) -> bool {
+        self.0 == other.0
+    }
+}
+
+impl std::cmp::Eq for PendingMerge {}
+
+impl std::cmp::PartialOrd for PendingMerge {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
 
-    fn priority(&self) -> Self::Priority {
-        self.0.num_rows()
+impl std::cmp::Ord for PendingMerge {
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        self.0.num_rows().cmp(&other.0.num_rows())
     }
 }
 
 #[cfg(test)]
 mod tests {
-    use arrow::array::UInt8Array;
-    use arrow::record_batch::RecordBatch;
+    use arrow_array::RecordBatch;
+    use arrow_array::UInt8Array;
     use proptest::prelude::*;
     use sparrow_core::TableSchema;
 
     use super::*;
-    use crate::merge::testing::arb_key_triples;
+    use crate::old::testing::arb_key_triples;
 
     proptest! {
         #[test]
@@ -186,8 +196,8 @@ mod tests {
         let split_array = UInt8Array::from(split);
         let inputs: Vec<_> = (0..inputs)
             .map(|n| {
-                let filter = arrow::compute::eq_scalar(&split_array, n).unwrap();
-                arrow::compute::filter_record_batch(&merged, &filter).unwrap()
+                let filter = arrow_ord::comparison::eq_scalar(&split_array, n).unwrap();
+                arrow_select::filter::filter_record_batch(&merged, &filter).unwrap()
             })
             .collect();
 
diff --git a/crates/sparrow-runtime/src/merge/input.rs b/crates/sparrow-merge/src/old/input.rs
similarity index 78%
rename from crates/sparrow-runtime/src/merge/input.rs
rename to crates/sparrow-merge/src/old/input.rs
index c11a0747d..9898f6f2b 100644
--- a/crates/sparrow-runtime/src/merge/input.rs
+++ b/crates/sparrow-merge/src/old/input.rs
@@ -1,12 +1,10 @@
 use smallvec::SmallVec;
 
-use crate::Batch;
-
 /// An `InputItem` is a splittable container of time-ordered data.
 ///
 /// The typical implementation used is `MergeInput` which implements this
 /// for an ordered `RecordBatch`.
-pub(crate) trait InputItem: Sized {
+pub trait InputItem: Sized {
     /// Return the minimum time in `self`.
     fn min_time(&self) -> i64;
 
@@ -23,7 +21,7 @@ pub(crate) trait InputItem: Sized {
 
 /// A collection of zero or more `InputItems` ordered by time.
 #[derive(Debug, PartialEq, Eq)]
-pub(crate) struct OrderedInputs<T: InputItem> {
+pub struct OrderedInputs<T: InputItem> {
     /// The input items.
     ///
     /// We use a `SmallVec` because in most cases we're dealing with a
@@ -54,14 +52,20 @@ pub(crate) struct OrderedInputs<T: InputItem> {
     pub(super) max_time: i64,
 }
 
-impl<T: InputItem> OrderedInputs<T> {
-    pub fn new() -> Self {
+impl<T: InputItem> Default for OrderedInputs<T> {
+    fn default() -> Self {
         Self {
             items: SmallVec::new(),
             min_time: i64::MIN,
             max_time: i64::MIN,
         }
     }
+}
+
+impl<T: InputItem> OrderedInputs<T> {
+    pub fn new() -> Self {
+        Self::default()
+    }
 
     pub fn is_empty(&self) -> bool {
         self.items.is_empty()
@@ -87,7 +91,9 @@ impl<T: InputItem> OrderedInputs<T> {
 
         Ok(())
     }
+}
 
+impl<T: InputItem> OrderedInputs<T> {
     /// Add an item to this `OrderedInputs`.
     ///
     /// This will panic if the item's times are not monotonically increasing
@@ -232,111 +238,10 @@ impl<T: InputItem> std::ops::Index<usize> for OrderedInputs<T> {
     }
 }
 
-impl InputItem for Batch {
-    fn min_time(&self) -> i64 {
-        self.lower_bound.time
-    }
-
-    fn max_time(&self) -> i64 {
-        self.upper_bound.time
-    }
-
-    fn split_at(self, split_time: i64) -> anyhow::Result<(Option<Self>, Option<Self>)> {
-        if self.is_empty() {
-            return Ok((None, None));
-        } else if split_time <= self.min_time() {
-            return Ok((None, Some(self)));
-        } else if split_time > self.max_time() {
-            return Ok((Some(self), None));
-        }
-
-        let times = self.times()?;
-        let split_point = match times.binary_search(&split_time) {
-            Ok(mut found_index) => {
-                // Just do a linear search for the first value less than split time.
-                while found_index > 0 && times[found_index - 1] == split_time {
-                    found_index -= 1
-                }
-                found_index
-            }
-            Err(not_found_index) => not_found_index,
-        };
-
-        let lt = if split_point > 0 {
-            let lt = self.data.slice(0, split_point);
-            Some(Batch::try_new_from_batch(lt)?)
-        } else {
-            None
-        };
-
-        let gte = if split_point < self.num_rows() {
-            let gte = self.data.slice(split_point, self.num_rows() - split_point);
-            Some(Batch::try_new_from_batch(gte)?)
-        } else {
-            None
-        };
-        Ok((lt, gte))
-    }
-}
-
 #[cfg(test)]
 mod tests {
-    use std::ops::RangeInclusive;
-    use std::sync::Arc;
-
-    use arrow::datatypes::{DataType, Field};
-    use arrow::record_batch::RecordBatch;
-    use itertools::Itertools;
-    use proptest::prelude::*;
-    use sparrow_core::TableSchema;
-
     use super::*;
-    use crate::merge::testing::{arb_i64_array, arb_key_triples};
-
-    fn arb_batch(max_len: usize) -> impl Strategy<Value = RecordBatch> {
-        (1..max_len)
-            .prop_flat_map(|len| (arb_key_triples(len), arb_i64_array(len)))
-            .prop_map(|((time, subsort, key_hash), values)| {
-                let schema = TableSchema::from_data_fields([Arc::new(Field::new(
-                    "data",
-                    DataType::Int64,
-                    true,
-                ))])
-                .unwrap();
-                let schema = schema.schema_ref();
-                RecordBatch::try_new(
-                    schema.clone(),
-                    vec![
-                        Arc::new(time),
-                        Arc::new(subsort),
-                        Arc::new(key_hash),
-                        Arc::new(values),
-                    ],
-                )
-                .unwrap()
-            })
-    }
-
-    proptest! {
-        #[test]
-        fn test_splitting(batch in arb_batch(1000)) {
-            // For every time value in the batch, try splitting there and make sure
-            // the ordering constraints are satisfied.
-            let input = Batch::try_new_from_batch(batch).unwrap();
-            let times = input.times().unwrap();
-
-            for split_time in times.iter().dedup() {
-                let (lt, gte) = input.clone().split_at(*split_time).unwrap();
-
-                if let Some(lt) = lt {
-                    lt.times().unwrap().iter().all(|t| *t < *split_time);
-                }
-                if let Some(gte) = gte {
-                    gte.times().unwrap().iter().all(|t| *t >= *split_time);
-                }
-            }
-        }
-    }
+    use std::ops::RangeInclusive;
 
     fn ranges(
         ranges: impl IntoIterator<Item = RangeInclusive<i64>>,
diff --git a/crates/sparrow-runtime/src/merge/testing.rs b/crates/sparrow-merge/src/old/testing.rs
similarity index 84%
rename from crates/sparrow-runtime/src/merge/testing.rs
rename to crates/sparrow-merge/src/old/testing.rs
index 2d0ecd69b..9f49a6c41 100644
--- a/crates/sparrow-runtime/src/merge/testing.rs
+++ b/crates/sparrow-merge/src/old/testing.rs
@@ -1,7 +1,6 @@
 use std::fmt::Write;
 
-use arrow::array::{Int64Array, TimestampNanosecondArray, UInt64Array};
-use arrow::record_batch::RecordBatch;
+use arrow_array::{Int64Array, RecordBatch, TimestampNanosecondArray, UInt64Array};
 use proptest::prelude::*;
 
 prop_compose! {
@@ -9,7 +8,7 @@ prop_compose! {
     ///
     /// We do this by generating a random sequence of steps (between 1 and 1000),
    /// and adding this to the previous key. We round keys to 0..19 and subsorts to 0..49.
-    pub(crate) fn arb_key_triples(len: impl Into<prop::collection::SizeRange>)(steps in prop::collection::vec(1..1000u64, len)) ->
+    pub fn arb_key_triples(len: impl Into<prop::collection::SizeRange>)(steps in prop::collection::vec(1..1000u64, len)) ->
         (TimestampNanosecondArray, UInt64Array, UInt64Array)
     {
         let key_triples: Vec<_> = steps.iter().scan((0i64, 0u64, 0u64), |s, step| {
             s.2 += step;
@@ -29,7 +28,7 @@
 }
 
 /// Create an arbitrary i64 array.
-pub(crate) fn arb_i64_array(len: usize) -> impl Strategy<Value = Int64Array> {
+pub fn arb_i64_array(len: usize) -> impl Strategy<Value = Int64Array> {
     prop::collection::vec(prop::option::weighted(0.9, prop::num::i64::ANY), len)
         .prop_map(Int64Array::from)
 }
@@ -39,7 +38,7 @@
 /// The `Debug` format of record batches is extremely verbose, which
 /// makes it impossible to read the generated test cases. This allows
 /// us to create a custom debug format.
-pub(crate) struct TestBatches(pub Vec<RecordBatch>);
+pub struct TestBatches(pub Vec<RecordBatch>);
 
 /// Adapt a `std::fmt::Formatter` to `std::io::Write`.
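 ///
 /// `arrow_csv::Writer` expects an `std::io::Write` sink while `Debug` only
 /// provides a `std::fmt::Formatter`, so this adapter forwards the written
 /// (UTF-8) bytes from the CSV writer into the formatter.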
struct DisplayWriter<'a, 'b>(&'a mut std::fmt::Formatter<'b>); @@ -66,7 +65,7 @@ impl std::fmt::Debug for TestBatches { for batch in &self.0 { { - let mut writer = arrow::csv::Writer::new(DisplayWriter(f)); + let mut writer = arrow_csv::Writer::new(DisplayWriter(f)); writer.write(batch).map_err(|_| std::fmt::Error)?; } diff --git a/crates/sparrow-physical/Cargo.toml b/crates/sparrow-physical/Cargo.toml index 180b7a27d..6a0ee8717 100644 --- a/crates/sparrow-physical/Cargo.toml +++ b/crates/sparrow-physical/Cargo.toml @@ -16,6 +16,7 @@ enum-as-inner.workspace = true index_vec.workspace = true serde.workspace = true sparrow-arrow = { path = "../sparrow-arrow" } +strum_macros.workspace = true [dev-dependencies] serde_yaml.workspace = true diff --git a/crates/sparrow-physical/src/step.rs b/crates/sparrow-physical/src/step.rs index a06bf6f31..08b77921b 100644 --- a/crates/sparrow-physical/src/step.rs +++ b/crates/sparrow-physical/src/step.rs @@ -26,6 +26,8 @@ index_vec::define_index_type! { /// on the newly computed keys. #[derive(Debug, serde::Serialize, serde::Deserialize)] pub struct Step { + /// The ID of the step. + pub id: StepId, /// The kind of step being performed. pub kind: StepKind, /// Inputs to this step. @@ -35,7 +37,7 @@ pub struct Step { } /// The kinds of steps that can occur in the physical plan. -#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[derive(Debug, serde::Serialize, serde::Deserialize, strum_macros::IntoStaticStr)] #[serde(rename_all = "snake_case")] pub enum StepKind { diff --git a/crates/sparrow-plan/Cargo.toml b/crates/sparrow-plan/Cargo.toml deleted file mode 100644 index 07a3db053..000000000 --- a/crates/sparrow-plan/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -name = "sparrow-plan" -version.workspace = true -authors.workspace = true -edition.workspace = true -license.workspace = true -publish = false -description = """ -Execution plans for Sparrow. 
-""" - -[dependencies] -anyhow.workspace = true -arrow.workspace = true -enum-map.workspace = true -hashbrown.workspace = true -itertools.workspace = true -parse-display.workspace = true -sparrow-api = { path = "../sparrow-api" } -sparrow-arrow = { path = "../sparrow-arrow" } -sparrow-syntax = { path = "../sparrow-syntax" } -static_init.workspace = true -strum.workspace = true -strum_macros.workspace = true -tracing.workspace = true -uuid.workspace = true - -[dev-dependencies] -serde_yaml.workspace = true - -[lib] -doctest = false diff --git a/crates/sparrow-qfr-tool/Cargo.toml b/crates/sparrow-qfr-tool/Cargo.toml index 24eb9d6ec..59d0a9830 100644 --- a/crates/sparrow-qfr-tool/Cargo.toml +++ b/crates/sparrow-qfr-tool/Cargo.toml @@ -23,7 +23,6 @@ serde_yaml.workspace = true smallvec.workspace = true sparrow-api = { path = "../sparrow-api" } sparrow-compiler = { path = "../sparrow-compiler" } -sparrow-plan = { path = "../sparrow-plan" } sparrow-qfr = { path = "../sparrow-qfr" } sparrow-syntax = { path = "../sparrow-syntax" } tracing.workspace = true diff --git a/crates/sparrow-runtime/Cargo.toml b/crates/sparrow-runtime/Cargo.toml index 2f9da71e8..d54a2c775 100644 --- a/crates/sparrow-runtime/Cargo.toml +++ b/crates/sparrow-runtime/Cargo.toml @@ -18,6 +18,8 @@ pulsar = ["dep:pulsar", "avro", "lz4"] ahash.workspace = true anyhow.workspace = true arrow.workspace = true +arrow-array.workspace = true +arrow-select.workspace = true async-once-cell.workspace = true async-stream.workspace = true async-trait.workspace = true @@ -63,7 +65,7 @@ sparrow-compiler = { path = "../sparrow-compiler" } sparrow-core = { path = "../sparrow-core" } sparrow-instructions = { path = "../sparrow-instructions" } sparrow-kernels = { path = "../sparrow-kernels" } -sparrow-plan = { path = "../sparrow-plan" } +sparrow-merge = { path = "../sparrow-merge", features = ["testing"] } sparrow-qfr = { path = "../sparrow-qfr" } sparrow-syntax = { path = "../sparrow-syntax" } static_init.workspace = true @@ -88,8 +90,4 @@ tempfile.workspace = true [lib] bench = false -doctest = false - -[[bench]] -name = "main" -harness = false +doctest = false \ No newline at end of file diff --git a/crates/sparrow-runtime/benches/binary_merge_bench.rs b/crates/sparrow-runtime/benches/binary_merge_bench.rs deleted file mode 100644 index d8e55e489..000000000 --- a/crates/sparrow-runtime/benches/binary_merge_bench.rs +++ /dev/null @@ -1,98 +0,0 @@ -use std::sync::Arc; - -use arrow::array::{TimestampNanosecondArray, UInt64Array}; -use arrow::datatypes::Schema; -use arrow::record_batch::RecordBatch; -use criterion::{BenchmarkId, Criterion, Throughput}; -use rand::prelude::StdRng; -use rand::{Rng, SeedableRng}; -use sparrow_core::TableSchema; -use sparrow_runtime::merge::{binary_merge, BinaryMergeInput}; - -fn rand_keys(schema: &TableSchema, size: usize, min_step: usize, max_step: usize) -> RecordBatch { - // This is a bit tricky because we want to (a) generate increasing sequences of - // keys, (b) without duplicates while (c) still using all parts of the key. - - let mut time_builder = TimestampNanosecondArray::builder(size); - let mut subsort_builder = UInt64Array::builder(size); - let mut key_hash_builderd = UInt64Array::builder(size); - - let mut time = 0i64; - let mut subsort = 0u64; - let mut key_hash = 0u64; - - let mut rng = StdRng::seed_from_u64(42); - - for _ in 0..size { - // Figure out which of the 3 fields we're going to increase. - // 60% increase occurrence, 30% increase arrival, 10% increase entity. 
-        let increase = rng.gen_range(0..=10);
-        if increase < 6 {
-            time += rng.gen_range(min_step as i64..max_step as i64);
-            subsort = rng.gen_range(min_step as u64..max_step as u64);
-            key_hash = rng.gen_range(min_step as u64..max_step as u64);
-        } else if increase < 9 {
-            subsort += rng.gen_range(min_step as u64..max_step as u64);
-            key_hash = rng.gen_range(min_step as u64..max_step as u64);
-        } else {
-            key_hash += rng.gen_range(min_step as u64..max_step as u64);
-        }
-
-        time_builder.append_value(time);
-        subsort_builder.append_value(subsort);
-        key_hash_builderd.append_value(key_hash);
-    }
-
-    RecordBatch::try_new(
-        schema.schema_ref().clone(),
-        vec![
-            Arc::new(time_builder.finish()),
-            Arc::new(subsort_builder.finish()),
-            Arc::new(key_hash_builderd.finish()),
-        ],
-    )
-    .unwrap()
-}
-
-fn rand_sparse(schema: &TableSchema, size: usize) -> RecordBatch {
-    rand_keys(schema, size, 200, 250)
-}
-
-fn rand_dense(schema: &TableSchema, size: usize) -> RecordBatch {
-    rand_keys(schema, size, 1, 5)
-}
-
-fn bench_merge(batch_a: &RecordBatch, batch_b: &RecordBatch) {
-    let input_a =
-        BinaryMergeInput::from_array_refs(batch_a.column(0), batch_a.column(1), batch_a.column(2))
-            .unwrap();
-    let input_b =
-        BinaryMergeInput::from_array_refs(batch_b.column(0), batch_b.column(1), batch_b.column(2))
-            .unwrap();
-    criterion::black_box(binary_merge(input_a, input_b).unwrap());
-}
-
-pub fn merge_benchmarks(c: &mut Criterion) {
-    let mut group = c.benchmark_group("binary_merge");
-
-    let schema = TableSchema::try_from_data_schema(&Schema::empty()).unwrap();
-    let dense_a = rand_dense(&schema, 2000000);
-    let dense_b = rand_dense(&schema, 2000000);
-    let sparse_a = rand_sparse(&schema, 1000000);
-    let sparse_b = rand_sparse(&schema, 1000000);
-
-    group.throughput(Throughput::Elements(2000000));
-    group.bench_function(BenchmarkId::from_parameter("two_sparse"), |b| {
-        b.iter(|| bench_merge(&sparse_a, &sparse_b))
-    });
-
-    group.throughput(Throughput::Elements(3000000));
-    group.bench_function(BenchmarkId::from_parameter("one_dense_one_sparse"), |b| {
-        b.iter(|| bench_merge(&dense_a, &sparse_b))
-    });
-
-    group.throughput(Throughput::Elements(4000000));
-    group.bench_function(BenchmarkId::from_parameter("two_dense"), |b| {
-        b.iter(|| bench_merge(&dense_a, &dense_b))
-    });
-}
diff --git a/crates/sparrow-runtime/benches/main.rs b/crates/sparrow-runtime/benches/main.rs
deleted file mode 100644
index dbcc83036..000000000
--- a/crates/sparrow-runtime/benches/main.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-#![warn(
-    rust_2018_idioms,
-    nonstandard_style,
-    future_incompatible,
-    clippy::mod_module_files,
-    clippy::print_stdout,
-    clippy::print_stderr
-)]
-
-use criterion::{criterion_group, criterion_main};
-
-mod binary_merge_bench;
-
-criterion_group!(benches, binary_merge_bench::merge_benchmarks);
-criterion_main!(benches);
diff --git a/crates/sparrow-runtime/src/batch.rs b/crates/sparrow-runtime/src/batch.rs
index 53cd5043c..32e180cca 100644
--- a/crates/sparrow-runtime/src/batch.rs
+++ b/crates/sparrow-runtime/src/batch.rs
@@ -399,3 +399,109 @@ impl<'a> std::fmt::Display for AsJson<'a> {
         write!(f, "{json_string}")
     }
 }
+
+impl sparrow_merge::old::InputItem for Batch {
+    fn min_time(&self) -> i64 {
+        self.lower_bound.time
+    }
+
+    fn max_time(&self) -> i64 {
+        self.upper_bound.time
+    }
+
+    fn split_at(self, split_time: i64) -> anyhow::Result<(Option<Self>, Option<Self>)> {
+        if self.is_empty() {
+            return Ok((None, None));
+        } else if split_time <= self.min_time() {
+            return Ok((None, Some(self)));
+        } else if split_time > self.max_time() {
+            return Ok((Some(self), None));
+        }
+
+        let times = self.times()?;
+        let split_point = match times.binary_search(&split_time) {
+            Ok(mut found_index) => {
+                // Just do a linear search for the first value less than split time.
+                while found_index > 0 && times[found_index - 1] == split_time {
+                    found_index -= 1
+                }
+                found_index
+            }
+            Err(not_found_index) => not_found_index,
+        };
+
+        let lt = if split_point > 0 {
+            let lt = self.data.slice(0, split_point);
+            Some(Batch::try_new_from_batch(lt)?)
+        } else {
+            None
+        };
+
+        let gte = if split_point < self.num_rows() {
+            let gte = self.data.slice(split_point, self.num_rows() - split_point);
+            Some(Batch::try_new_from_batch(gte)?)
+        } else {
+            None
+        };
+        Ok((lt, gte))
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::sync::Arc;
+
+    use arrow::datatypes::{DataType, Field};
+    use itertools::Itertools;
+    use proptest::prelude::*;
+    use sparrow_core::TableSchema;
+    use sparrow_merge::old::InputItem;
+
+    use super::*;
+    use sparrow_merge::old::testing::{arb_i64_array, arb_key_triples};
+
+    fn arb_batch(max_len: usize) -> impl Strategy<Value = RecordBatch> {
+        (1..max_len)
+            .prop_flat_map(|len| (arb_key_triples(len), arb_i64_array(len)))
+            .prop_map(|((time, subsort, key_hash), values)| {
+                let schema = TableSchema::from_data_fields([Arc::new(Field::new(
+                    "data",
+                    DataType::Int64,
+                    true,
+                ))])
+                .unwrap();
+                let schema = schema.schema_ref();
+                RecordBatch::try_new(
+                    schema.clone(),
+                    vec![
+                        Arc::new(time),
+                        Arc::new(subsort),
+                        Arc::new(key_hash),
+                        Arc::new(values),
+                    ],
+                )
+                .unwrap()
+            })
+    }
+
+    proptest! {
+        #[test]
+        fn test_splitting(batch in arb_batch(1000)) {
+            // For every time value in the batch, try splitting there and make sure
+            // the ordering constraints are satisfied.
+            let input = Batch::try_new_from_batch(batch).unwrap();
+            let times = input.times().unwrap();
+
+            for split_time in times.iter().dedup() {
+                let (lt, gte) = input.clone().split_at(*split_time).unwrap();
+
+                if let Some(lt) = lt {
+                    lt.times().unwrap().iter().all(|t| *t < *split_time);
+                }
+                if let Some(gte) = gte {
+                    gte.times().unwrap().iter().all(|t| *t >= *split_time);
+                }
+            }
+        }
+    }
+}
diff --git a/crates/sparrow-runtime/src/execute.rs b/crates/sparrow-runtime/src/execute.rs
index a64d58828..3b284df57 100644
--- a/crates/sparrow-runtime/src/execute.rs
+++ b/crates/sparrow-runtime/src/execute.rs
@@ -1,39 +1,37 @@
 use std::sync::Arc;
 
 use chrono::NaiveDateTime;
+use enum_map::EnumMap;
 use error_stack::{IntoReport, IntoReportCompat, ResultExt};
 use futures::Stream;
 use prost_wkt_types::Timestamp;
 use sparrow_api::kaskada::v1alpha::execute_request::Limits;
 use sparrow_api::kaskada::v1alpha::{
-    ComputePlan, ComputeTable, Destination, ExecuteRequest, ExecuteResponse, LateBoundValue,
-    PerEntityBehavior,
+    ComputePlan, ComputeSnapshotConfig, ComputeTable, ExecuteRequest, ExecuteResponse,
+    LateBoundValue, PerEntityBehavior, PlanHash,
 };
 use sparrow_arrow::scalar_value::ScalarValue;
 use sparrow_compiler::{hash_compute_plan_proto, DataContext};
-use sparrow_instructions::ComputeStore;
 use sparrow_qfr::kaskada::sparrow::v1alpha::FlightRecordHeader;
-use tracing::Instrument;
 
+use crate::execute::compute_store_guard::ComputeStoreGuard;
 use crate::execute::error::Error;
-use crate::execute::key_hash_inverse::{KeyHashInverse, ThreadSafeKeyHashInverse};
 use crate::execute::operation::OperationContext;
+use crate::execute::output::Destination;
+use crate::key_hash_inverse::{KeyHashInverse, ThreadSafeKeyHashInverse};
 use crate::stores::ObjectStoreRegistry;
 use crate::RuntimeOptions;
 
 mod checkpoints;
 mod compute_executor;
+mod compute_store_guard;
 pub mod error;
-pub(crate) mod key_hash_inverse;
 pub(crate) mod operation;
 pub mod output;
 mod progress_reporter;
 mod spawner;
 
 pub use compute_executor::*;
 
-// The path prefix to the local compute store db.
-const STORE_PATH_PREFIX: &str = "compute_snapshot_";
-
 /// The main method for executing a Fenl query.
 ///
 /// The `request` proto contains the execution plan as well as
@@ -52,114 +50,135 @@ pub async fn execute(
     let destination = request
         .destination
         .ok_or(Error::MissingField("destination"))?;
+    let destination =
+        Destination::try_from(destination).change_context(Error::InvalidDestination)?;
 
-    let changed_since_time = request.changed_since.unwrap_or(Timestamp {
-        seconds: 0,
-        nanos: 0,
-    });
+    let data_context = DataContext::try_from_tables(request.tables.to_vec())
+        .into_report()
+        .change_context(Error::internal_msg("create data context"))?;
 
-    // Create and populate the late bindings.
-    // We don't use the `enum_map::enum_map!(...)` initialization because it would
-    // require looping over (and cloning) the scalar value unnecessarily.
-    let mut late_bindings = enum_map::enum_map! {
-        _ => None
-    };
-    late_bindings[LateBoundValue::ChangedSinceTime] = Some(ScalarValue::timestamp(
-        changed_since_time.seconds,
-        changed_since_time.nanos,
-        None,
-    ));
-
-    let output_at_time = if let Some(output_at_time) = request.final_result_time {
-        late_bindings[LateBoundValue::FinalAtTime] = Some(ScalarValue::timestamp(
-            output_at_time.seconds,
-            output_at_time.nanos,
-            None,
-        ));
-        Some(output_at_time)
-    } else {
-        late_bindings[LateBoundValue::FinalAtTime] = None;
-        None
+    let options = ExecutionOptions {
+        bounded_lateness_ns,
+        changed_since_time: request.changed_since.unwrap_or_default(),
+        final_at_time: request.final_result_time,
+        compute_snapshot_config: request.compute_snapshot_config,
+        limits: request.limits,
+        ..ExecutionOptions::default()
     };
 
-    let mut data_context = DataContext::try_from_tables(request.tables.to_vec())
-        .into_report()
-        .change_context(Error::internal_msg("create data context"))?;
+    // let output_at_time = request.final_result_time;
 
-    let object_stores = Arc::new(ObjectStoreRegistry::default());
+    execute_new(plan, destination, data_context, options, None).await
+}
 
-    // If the snapshot config exists, sparrow should attempt to resume from state,
-    // and store new state. Create a new storage path for the local store to
-    // exist.
-    let storage_dir = if let Some(config) = &request.compute_snapshot_config {
-        let dir = tempfile::Builder::new()
-            .prefix(&STORE_PATH_PREFIX)
-            .tempdir()
-            .into_report()
-            .change_context(Error::internal_msg("create snapshot dir"))?;
-
-        // If a `resume_from` path is specified, download the existing state from s3.
-        if let Some(resume_from) = &config.resume_from {
-            checkpoints::download(resume_from, object_stores.as_ref(), dir.path(), config)
-                .instrument(tracing::info_span!("Downloading checkpoint files"))
-                .await
-                .change_context(Error::internal_msg("download snapshot"))?;
-        } else {
-            tracing::info!("No snapshot set to resume from. Using empty compute store.");
+#[derive(Default, Debug)]
+pub struct ExecutionOptions {
+    pub changed_since_time: Timestamp,
+    pub final_at_time: Option<Timestamp>,
+    pub bounded_lateness_ns: Option<i64>,
+    pub compute_snapshot_config: Option<ComputeSnapshotConfig>,
+    pub limits: Option<Limits>,
+    pub stop_signal_rx: Option<tokio::sync::watch::Receiver<bool>>,
+    /// Maximum rows to emit in a single batch.
+    pub max_batch_size: Option<usize>,
+    /// If true, the execution is a materialization.
+    ///
+    /// It will subscribe to the input stream and continue running as new data
+    /// arrives. It won't send final ticks.
+    pub materialize: bool,
+}
+
+impl ExecutionOptions {
+    pub fn late_bindings(&self) -> EnumMap<LateBoundValue, Option<ScalarValue>> {
+        enum_map::enum_map! {
+            LateBoundValue::ChangedSinceTime => Some(ScalarValue::timestamp(
+                self.changed_since_time.seconds,
+                self.changed_since_time.nanos,
+                None,
+            )),
+            LateBoundValue::FinalAtTime => self.final_at_time.as_ref().map(|t| ScalarValue::timestamp(
+                t.seconds,
+                t.nanos,
+                None,
+            )),
+            _ => None
+        }
     }
-        Some(dir)
-    } else {
-        tracing::info!("No snapshot config; not creating compute store.");
-        None
-    };
+    pub fn set_changed_since(&mut self, changed_since: Timestamp) {
+        self.changed_since_time = changed_since;
+    }
 
-    let plan_hash = hash_compute_plan_proto(&plan);
+    pub fn set_final_at_time(&mut self, final_at_time: Timestamp) {
+        self.final_at_time = Some(final_at_time);
+    }
 
-    let compute_store = if let Some(dir) = &storage_dir {
-        let max_allowed_max_event_time = match plan.per_entity_behavior() {
-            PerEntityBehavior::Unspecified => {
-                error_stack::bail!(Error::UnspecifiedPerEntityBehavior)
-            }
+    async fn compute_store(
+        &self,
+        object_stores: &ObjectStoreRegistry,
+        per_entity_behavior: PerEntityBehavior,
+        plan_hash: &PlanHash,
+    ) -> error_stack::Result<Option<ComputeStoreGuard>, Error> {
+        // If the snapshot config exists, sparrow should attempt to resume from state,
+        // and store new state. Create a new storage path for the local store to
+        // exist.
+        if let Some(config) = self.compute_snapshot_config.clone() {
+            let max_allowed_max_event_time = match per_entity_behavior {
+                PerEntityBehavior::Unspecified => {
+                    error_stack::bail!(Error::UnspecifiedPerEntityBehavior)
                }
-            PerEntityBehavior::All => {
-                // For all results, we need a snapshot with a maximum event time
-                // no larger than the changed_since time, since we need to replay
-                // (and recompute the results for) all events after the changed
-                // since time.
-                changed_since_time.clone()
-            }
-            PerEntityBehavior::Final => {
-                // This is a bit confusing. Right now, the manager is responsible for
+                // choosing a valid snapshot to resume from. Thus, the work of choosing
+                // a valid snapshot with regard to any new input data is already done.
+                // However, the engine does a sanity check here to ensure the snapshot's
+                // max event time is before the allowed max event time the engine supports,
+                // dependent on the entity behavior of the query.
+                //
+                // For FinalResults, the snapshot can have a max event time of "any time",
+                // so we set this to Timestamp::MAX. This is because we just need to be able
+                // to produce results once after all new events have been processed, and
+                // we can already assume a valid snapshot is chosen and the correct input
+                // files are being processed.
+                    Timestamp {
+                        seconds: i64::MAX,
+                        nanos: i32::MAX,
+                    }
+                }
+                PerEntityBehavior::FinalAtTime => {
+                    self.final_at_time.clone().expect("final at time")
+                }
+            };
+            let guard = compute_store_guard::ComputeStoreGuard::try_new(
+                config,
+                object_stores,
+                max_allowed_max_event_time,
+                plan_hash,
+            )
+            .await?;
+
+            Ok(Some(guard))
+        } else {
+            tracing::info!("No snapshot config; not creating compute store.");
+            Ok(None)
+        }
+    }
+}
+
+async fn load_key_hash_inverse(
+    plan: &ComputePlan,
+    data_context: &mut DataContext,
+    compute_store: &Option<ComputeStoreGuard>,
+    object_stores: &ObjectStoreRegistry,
+) -> error_stack::Result<Arc<ThreadSafeKeyHashInverse>, Error> {
     let primary_grouping_key_type = plan
         .primary_grouping_key_type
         .to_owned()
@@ -168,74 +187,110 @@ pub async fn execute(
         arrow::datatypes::DataType::try_from(&primary_grouping_key_type)
             .into_report()
             .change_context(Error::internal_msg("decode primary_grouping_key_type"))?;
-    let mut key_hash_inverse = KeyHashInverse::from_data_type(primary_grouping_key_type.clone());
-    if let Some(compute_store) = compute_store.to_owned() {
-        if let Ok(restored) = KeyHashInverse::restore_from(&compute_store) {
-            key_hash_inverse = restored
-        }
-    }
 
     let primary_group_id = data_context
         .get_or_create_group_id(&plan.primary_grouping, &primary_grouping_key_type)
         .into_report()
         .change_context(Error::internal_msg("get primary grouping ID"))?;
 
+    let mut key_hash_inverse = KeyHashInverse::from_data_type(&primary_grouping_key_type.clone());
+    if let Some(compute_store) = &compute_store {
+        if let Ok(restored) = KeyHashInverse::restore_from(compute_store.store_ref()) {
+            key_hash_inverse = restored
+        }
+    }
+
     key_hash_inverse
-        .add_from_data_context(&data_context, primary_group_id, &object_stores)
+        .add_from_data_context(data_context, primary_group_id, object_stores)
         .await
         .change_context(Error::internal_msg("initialize key hash inverse"))?;
     let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::new(key_hash_inverse));
+    Ok(key_hash_inverse)
+}
+
+/// Execute a given query based on the options.
+///
+/// Parameters
+/// ----------
+/// - key_hash_inverse: If set, specifies the key hash inverses to use. If None, the
+///   key hashes will be created.
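+///
+/// A minimal usage sketch (illustrative only -- it assumes a `plan`,
+/// `destination`, and `data_context` have already been constructed):
+///
+/// ```ignore
+/// use futures::StreamExt;
+///
+/// let options = ExecutionOptions::default();
+/// let mut responses =
+///     execute_new(plan, destination, data_context, options, None).await?;
+/// while let Some(response) = responses.next().await {
+///     tracing::info!("progress: {:?}", response?);
+/// }
+/// ```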
+pub async fn execute_new(
+    plan: ComputePlan,
+    destination: Destination,
+    mut data_context: DataContext,
+    options: ExecutionOptions,
+    key_hash_inverse: Option<Arc<ThreadSafeKeyHashInverse>>,
+) -> error_stack::Result<impl Stream<Item = error_stack::Result<ExecuteResponse, Error>>, Error> {
+    let object_stores = Arc::new(ObjectStoreRegistry::default());
+
+    let plan_hash = hash_compute_plan_proto(&plan);
+
+    let compute_store = options
+        .compute_store(
+            object_stores.as_ref(),
+            plan.per_entity_behavior(),
+            &plan_hash,
+        )
+        .await?;
+
+    let key_hash_inverse = if let Some(key_hash_inverse) = key_hash_inverse {
+        key_hash_inverse
+    } else {
+        load_key_hash_inverse(&plan, &mut data_context, &compute_store, &object_stores)
+            .await
+            .change_context(Error::internal_msg("load key hash inverse"))?
+    };
 
     // Channel for the output stats.
     let (progress_updates_tx, progress_updates_rx) =
         tokio::sync::mpsc::channel(29.max(plan.operations.len() * 2));
 
-    let output_datetime = if let Some(t) = output_at_time {
-        Some(
+    let output_at_time = options
+        .final_at_time
+        .as_ref()
+        .map(|t| {
             NaiveDateTime::from_timestamp_opt(t.seconds, t.nanos as u32)
-                .ok_or_else(|| Error::internal_msg("expected valid timestamp"))?,
-        )
-    } else {
-        None
-    };
+                .ok_or_else(|| Error::internal_msg("expected valid timestamp"))
+        })
+        .transpose()?;
 
-    // We use the plan hash for validating the snapshot is as expected.
-    // Rather than accepting it as input (which could lead to us getting
-    // a correct hash but an incorrect plan) we re-hash the plan.
     let context = OperationContext {
         plan,
-        plan_hash,
        object_stores,
        data_context,
-        compute_store,
+        compute_store: compute_store.as_ref().map(|g| g.store()),
        key_hash_inverse,
        max_event_in_snapshot: None,
        progress_updates_tx,
-        output_at_time: output_datetime,
-        bounded_lateness_ns,
+        output_at_time,
+        bounded_lateness_ns: options.bounded_lateness_ns,
+        materialize: options.materialize,
    };
 
     // Start executing the query. We pass the response channel to the
     // execution layer so it can periodically report progress.
     tracing::debug!("Starting query execution");
 
+    let late_bindings = options.late_bindings();
     let runtime_options = RuntimeOptions {
-        limits: request.limits.unwrap_or_default(),
+        limits: options.limits.unwrap_or_default(),
         flight_record_path: None,
+        max_batch_size: options.max_batch_size,
     };
 
     let compute_executor = ComputeExecutor::try_spawn(
         context,
+        plan_hash,
         &late_bindings,
         &runtime_options,
         progress_updates_rx,
         destination,
-        None,
+        options.stop_signal_rx,
     )
     .await
     .change_context(Error::internal_msg("spawn compute executor"))?;
 
-    Ok(compute_executor.execute_with_progress(storage_dir, request.compute_snapshot_config))
+    Ok(compute_executor.execute_with_progress(compute_store))
 }
 
 /// The main method for starting a materialization process.
@@ -249,102 +304,27 @@ pub async fn materialize(
     bounded_lateness_ns: Option<i64>,
     stop_signal_rx: tokio::sync::watch::Receiver<bool>,
 ) -> error_stack::Result<impl Stream<Item = error_stack::Result<ExecuteResponse, Error>>, Error> {
-    // TODO: Unimplemented feature - changed_since_time
-    let changed_since_time = Timestamp {
-        seconds: 0,
-        nanos: 0,
-    };
-
-    // Create and populate the late bindings.
-    // We don't use the `enum_map::enum_map!(...)` initialization because it would
-    // require looping over (and cloning) the scalar value unnecessarily.
-    let mut late_bindings = enum_map::enum_map! {
-        _ => None
+    let options = ExecutionOptions {
+        bounded_lateness_ns,
+        // TODO: Unimplemented feature - changed_since_time
+        changed_since_time: Timestamp {
+            seconds: 0,
+            nanos: 0,
+        },
+        // Unsupported: not allowed to materialize at a specific time
+        final_at_time: None,
+        // TODO: Resuming from state is unimplemented
+        compute_snapshot_config: None,
+        stop_signal_rx: Some(stop_signal_rx),
+        ..ExecutionOptions::default()
     };
-    late_bindings[LateBoundValue::ChangedSinceTime] = Some(ScalarValue::timestamp(
-        changed_since_time.seconds,
-        changed_since_time.nanos,
-        None,
-    ));
-    // Unsupported: not allowed to materialize at a specific time
-    late_bindings[LateBoundValue::FinalAtTime] = None;
-    let output_at_time = None;
-
-    let mut data_context = DataContext::try_from_tables(tables)
+    let data_context = DataContext::try_from_tables(tables)
         .into_report()
         .change_context(Error::internal_msg("create data context"))?;
 
-    // TODO: Resuming from state is unimplemented
-    let storage_dir = None;
-    let snapshot_compute_store = None;
-
-    let plan_hash = hash_compute_plan_proto(&plan);
-
-    let primary_grouping_key_type = plan
-        .primary_grouping_key_type
-        .to_owned()
-        .ok_or(Error::MissingField("primary_grouping_key_type"))?;
-    let primary_grouping_key_type =
-        arrow::datatypes::DataType::try_from(&primary_grouping_key_type)
-            .into_report()
-            .change_context(Error::internal_msg("decode primary_grouping_key_type"))?;
-    let mut key_hash_inverse = KeyHashInverse::from_data_type(primary_grouping_key_type.clone());
-
-    let primary_group_id = data_context
-        .get_or_create_group_id(&plan.primary_grouping, &primary_grouping_key_type)
-        .into_report()
-        .change_context(Error::internal_msg("get primary grouping ID"))?;
-
-    let object_stores = Arc::new(ObjectStoreRegistry::default());
-    key_hash_inverse
-        .add_from_data_context(&data_context, primary_group_id, &object_stores)
-        .await
-        .change_context(Error::internal_msg("initialize key hash inverse"))?;
-    let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::new(key_hash_inverse));
-
-    // Channel for the output stats.
-    let (progress_updates_tx, progress_updates_rx) =
-        tokio::sync::mpsc::channel(29.max(plan.operations.len() * 2));
-
-    // We use the plan hash for validating the snapshot is as expected.
-    // Rather than accepting it as input (which could lead to us getting
-    // a correct hash but an incorrect plan) we re-hash the plan.
-    let context = OperationContext {
-        plan,
-        plan_hash,
-        object_stores,
-        data_context,
-        compute_store: snapshot_compute_store,
-        key_hash_inverse,
-        max_event_in_snapshot: None,
-        progress_updates_tx,
-        output_at_time,
-        bounded_lateness_ns,
-    };
-
-    // Start executing the query. We pass the response channel to the
-    // execution layer so it can periodically report progress.
-    tracing::debug!("Starting query execution");
-
-    let runtime_options = RuntimeOptions {
-        limits: Limits::default(),
-        flight_record_path: None,
-    };
-
-    let compute_executor = ComputeExecutor::try_spawn(
-        context,
-        &late_bindings,
-        &runtime_options,
-        progress_updates_rx,
-        destination,
-        Some(stop_signal_rx),
-    )
-    .await
-    .change_context(Error::internal_msg("spawn compute executor"))?;
-
     // TODO: the `execute_with_progress` method contains a lot of additional logic that is theoretically not needed,
     // as the materialization does not exit, and should not need to handle cleanup tasks that regular
     // queries do. We should likely refactor this to use a separate `materialize_with_progress` method.
-    Ok(compute_executor.execute_with_progress(storage_dir, None))
+    execute_new(plan, destination, data_context, options, None).await
 }
diff --git a/crates/sparrow-runtime/src/execute/compute_executor.rs b/crates/sparrow-runtime/src/execute/compute_executor.rs
index 86099f4c9..479ee2ef6 100644
--- a/crates/sparrow-runtime/src/execute/compute_executor.rs
+++ b/crates/sparrow-runtime/src/execute/compute_executor.rs
@@ -7,19 +7,18 @@
 use futures::stream::{FuturesUnordered, PollNext};
 use futures::{FutureExt, Stream, TryFutureExt};
 use prost_wkt_types::Timestamp;
 use sparrow_api::kaskada::v1alpha::ComputeSnapshot;
-use sparrow_api::kaskada::v1alpha::ComputeSnapshotConfig;
-use sparrow_api::kaskada::v1alpha::{self, ExecuteResponse, LateBoundValue, PlanHash};
+use sparrow_api::kaskada::v1alpha::{ExecuteResponse, LateBoundValue, PlanHash};
 use sparrow_arrow::scalar_value::ScalarValue;
-use sparrow_instructions::ComputeStore;
 use sparrow_qfr::io::writer::FlightRecordWriter;
 use sparrow_qfr::kaskada::sparrow::v1alpha::FlightRecordHeader;
 use sparrow_qfr::FlightRecorderFactory;
-use tempfile::TempDir;
 use tokio_stream::wrappers::UnboundedReceiverStream;
 use tokio_stream::StreamExt;
 use tracing::{error, info, info_span, Instrument};
 
+use crate::execute::compute_store_guard::ComputeStoreGuard;
 use crate::execute::operation::{OperationContext, OperationExecutor};
+use crate::execute::output::Destination;
 use crate::execute::progress_reporter::{progress_stream, ProgressUpdate};
 use crate::execute::spawner::ComputeTaskSpawner;
 use crate::execute::Error;
@@ -30,7 +29,6 @@
 pub(crate) struct ComputeExecutor {
     object_stores: Arc<ObjectStoreRegistry>,
-    compute_store: Option<Arc<ComputeStore>>,
     plan_hash: PlanHash,
     futures: FuturesUnordered<tokio::task::JoinHandle<error_stack::Result<(), Error>>>,
     progress_updates_rx: tokio::sync::mpsc::Receiver<ProgressUpdate>,
@@ -52,10 +50,11 @@ impl ComputeExecutor {
     /// Spawns the compute tasks using the new operation based executor.
     pub async fn try_spawn(
         mut context: OperationContext,
+        plan_hash: PlanHash,
         late_bindings: &EnumMap<LateBoundValue, Option<ScalarValue>>,
         runtime_options: &RuntimeOptions,
         progress_updates_rx: tokio::sync::mpsc::Receiver<ProgressUpdate>,
-        destination: v1alpha::Destination,
+        destination: Destination,
         stop_signal_rx: Option<tokio::sync::watch::Receiver<bool>>,
     ) -> error_stack::Result<Self, Error> {
         let mut spawner = ComputeTaskSpawner::new();
@@ -88,6 +87,7 @@
                 futures::StreamExt::boxed(tokio_stream::wrappers::ReceiverStream::new(output_rx)),
                 context.progress_updates_tx.clone(),
                 destination,
+                runtime_options.max_batch_size,
             )
             .change_context(Internal("error writing output"))?
             .map_err(|e| e.change_context(Internal("error writing output"))),
@@ -154,8 +154,7 @@
         Ok(Self {
             object_stores: context.object_stores,
-            compute_store: context.compute_store,
-            plan_hash: context.plan_hash,
+            plan_hash,
             futures: spawner.finish(),
             progress_updates_rx,
             max_event_time_rx,
@@ -166,14 +165,12 @@
     ///
     /// The `finish` function is called after the final compute result has been
     /// created, but before progress information stops being streamed.
-    pub fn execute_with_progress(
+    pub(super) fn execute_with_progress(
         self,
-        storage_dir: Option<TempDir>,
-        compute_snapshot_config: Option<ComputeSnapshotConfig>,
+        store: Option<ComputeStoreGuard>,
     ) -> impl Stream<Item = error_stack::Result<ExecuteResponse, Error>> {
         let Self {
             object_stores,
-            compute_store,
             plan_hash,
             futures,
             progress_updates_rx,
@@ -200,40 +197,16 @@
             };
             let compute_result = compute_result.expect("ok");
 
-            if let Some(compute_store) = compute_store {
-                // Write the max input time to the store.
- if let Err(e) = compute_store - .put_max_event_time(&compute_result.max_input_timestamp) - .into_report() - { - return ProgressUpdate::ExecutionFailed { - error: e - .change_context(Error::Internal("failed to report max event time")), - }; - } - - // Now that everything has completed, we attempt to get the compute store out. - // This lets us explicitly drop the store here. - match Arc::try_unwrap(compute_store) { - Ok(owned_compute_store) => std::mem::drop(owned_compute_store), - Err(_) => panic!("unable to reclaim compute store"), - }; - } - - let compute_snapshots = upload_compute_snapshots( - object_stores, - storage_dir, - compute_snapshot_config, - compute_result, - ) - .instrument(tracing::info_span!("Uploading checkpoint files")) - .await - .unwrap_or_else(|e| { - // Log, but don't fail if we couldn't upload snapshots. - // We can still produce valid answers, but won't perform an incremental query. - error!("Failed to upload compute snapshot(s):\n{:?}", e); - Vec::new() - }); + let compute_snapshots = + upload_compute_snapshots(object_stores.as_ref(), store, compute_result) + .instrument(tracing::info_span!("Uploading checkpoint files")) + .await + .unwrap_or_else(|e| { + // Log, but don't fail if we couldn't upload snapshots. + // We can still produce valid answers, but won't perform an incremental query. + error!("Failed to upload compute snapshot(s):\n{:?}", e); + Vec::new() + }); Ok(ProgressUpdate::ExecutionComplete { compute_snapshots }) }; @@ -269,30 +242,14 @@ fn select_biased( } async fn upload_compute_snapshots( - object_stores: Arc, - storage_dir: Option, - compute_snapshot_config: Option, + object_stores: &ObjectStoreRegistry, + store: Option, compute_result: ComputeResult, ) -> error_stack::Result, Error> { let mut snapshots = Vec::new(); - // If a snapshot config exists, let's assume for now that this - // indicates we want to upload snapshots. - // - // There may be situations where we want to resume from a snapshot, - // but not upload new snapshots. - if let Some(snapshot_config) = compute_snapshot_config { - let storage_dir = storage_dir.ok_or(Error::Internal("missing storage dir"))?; - - let snapshot_metadata = super::checkpoints::upload( - object_stores.as_ref(), - storage_dir, - snapshot_config, - compute_result, - ) - .await - .change_context(Error::Internal("uploading snapshot"))?; - snapshots.push(snapshot_metadata); + if let Some(store) = store { + snapshots.push(store.finish(object_stores, compute_result).await?); } else { tracing::info!("No snapshot config; not uploading compute store.") } diff --git a/crates/sparrow-runtime/src/execute/compute_store_guard.rs b/crates/sparrow-runtime/src/execute/compute_store_guard.rs new file mode 100644 index 000000000..8ce574afc --- /dev/null +++ b/crates/sparrow-runtime/src/execute/compute_store_guard.rs @@ -0,0 +1,82 @@ +use std::sync::Arc; + +use error_stack::{IntoReport, IntoReportCompat, ResultExt}; +use prost_wkt_types::Timestamp; +use sparrow_api::kaskada::v1alpha::{ComputeSnapshot, ComputeSnapshotConfig, PlanHash}; +use sparrow_instructions::ComputeStore; +use tempfile::TempDir; +use tracing::Instrument; + +use crate::execute::error::Error; +use crate::execute::{checkpoints, ComputeResult}; +use crate::stores::ObjectStoreRegistry; + +pub(super) struct ComputeStoreGuard { + dir: TempDir, + store: Arc, + config: ComputeSnapshotConfig, +} + +// The path prefix to the local compute store db. 
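+// (Lifecycle note: the store lives in a `TempDir`, so dropping the guard without calling `finish` deletes the local files automatically; `finish` consumes the guard, records the final max event time, drops the store to release it, and uploads the checkpoint.)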
+const STORE_PATH_PREFIX: &str = "compute_snapshot_"; + +impl ComputeStoreGuard { + pub async fn try_new( + config: ComputeSnapshotConfig, + object_stores: &ObjectStoreRegistry, + max_allowed_max_event_time: Timestamp, + plan_hash: &PlanHash, + ) -> error_stack::Result { + let dir = tempfile::Builder::new() + .prefix(&STORE_PATH_PREFIX) + .tempdir() + .into_report() + .change_context(Error::internal_msg("create snapshot dir"))?; + + // If a `resume_from` path is specified, download the existing state from s3. + if let Some(resume_from) = &config.resume_from { + checkpoints::download(resume_from, object_stores, dir.path(), &config) + .instrument(tracing::info_span!("Downloading checkpoint files")) + .await + .change_context(Error::internal_msg("download snapshot"))?; + } else { + tracing::info!("No snapshot set to resume from. Using empty compute store."); + } + + let store = ComputeStore::try_new(dir.path(), &max_allowed_max_event_time, plan_hash) + .into_report() + .change_context(Error::internal_msg("loading compute store"))?; + Ok(Self { dir, store, config }) + } + + pub async fn finish( + self, + object_stores: &ObjectStoreRegistry, + compute_result: ComputeResult, + ) -> error_stack::Result { + // Write the max input time to the store. + self.store + .put_max_event_time(&compute_result.max_input_timestamp) + .into_report() + .change_context(Error::Internal("failed to report max event time"))?; + + // Now that everything has completed, we attempt to get the compute store out. + // This lets us explicitly drop the store here. + match Arc::try_unwrap(self.store) { + Ok(owned_compute_store) => std::mem::drop(owned_compute_store), + Err(_) => panic!("unable to reclaim compute store"), + }; + + super::checkpoints::upload(object_stores, self.dir, self.config, compute_result) + .await + .change_context(Error::Internal("uploading snapshot")) + } + + pub fn store(&self) -> Arc { + self.store.clone() + } + + pub fn store_ref(&self) -> &ComputeStore { + self.store.as_ref() + } +} diff --git a/crates/sparrow-runtime/src/execute/error.rs b/crates/sparrow-runtime/src/execute/error.rs index f29f85e4f..e3a17aeba 100644 --- a/crates/sparrow-runtime/src/execute/error.rs +++ b/crates/sparrow-runtime/src/execute/error.rs @@ -12,6 +12,8 @@ pub enum Error { Internal(&'static str), #[display(fmt = "invalid operation: {_0}")] InvalidOperation(String), + #[display(fmt = "invalid destination")] + InvalidDestination, #[display(fmt = "failed to pre-process next input for operation")] PreprocessNextInput, #[display(fmt = "output '{output}' is not supported")] diff --git a/crates/sparrow-runtime/src/execute/operation.rs b/crates/sparrow-runtime/src/execute/operation.rs index 96edac38e..600cb8abb 100644 --- a/crates/sparrow-runtime/src/execute/operation.rs +++ b/crates/sparrow-runtime/src/execute/operation.rs @@ -44,9 +44,7 @@ use error_stack::{IntoReport, IntoReportCompat, Report, Result, ResultExt}; use futures::Future; use prost_wkt_types::Timestamp; use sparrow_api::kaskada::v1alpha::operation_plan::tick_operation::TickBehavior; -use sparrow_api::kaskada::v1alpha::{ - operation_plan, ComputePlan, LateBoundValue, OperationPlan, PlanHash, -}; +use sparrow_api::kaskada::v1alpha::{operation_plan, ComputePlan, LateBoundValue, OperationPlan}; use sparrow_arrow::scalar_value::ScalarValue; use sparrow_compiler::DataContext; use sparrow_instructions::ComputeStore; @@ -62,10 +60,10 @@ use self::scan::ScanOperation; use self::select::SelectOperation; use self::tick::TickOperation; use self::with_key::WithKeyOperation; -use 
crate::execute::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::execute::operation::expression_executor::{ExpressionExecutor, InputColumn}; use crate::execute::operation::shift_until::ShiftUntilOperation; use crate::execute::Error; +use crate::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::stores::ObjectStoreRegistry; use crate::Batch; @@ -80,7 +78,6 @@ use crate::Batch; /// the method to create a table reader on to it. pub(crate) struct OperationContext { pub plan: ComputePlan, - pub plan_hash: PlanHash, pub object_stores: Arc<ObjectStoreRegistry>, pub data_context: DataContext, pub compute_store: Option<Arc<ComputeStore>>, @@ -99,6 +96,13 @@ pub(crate) struct OperationContext { /// /// If not set, defaults to the [BOUNDED_LATENESS_NS] const. pub bounded_lateness_ns: Option, + /// If true, the execution is a materialization. + /// + /// It will subscribe to the input stream and continue running as new data + /// arrives. It won't send final ticks. + /// + /// Derived from the `ExecutionOptions`. + pub materialize: bool, } impl OperationContext { diff --git a/crates/sparrow-runtime/src/execute/operation/expression_executor.rs b/crates/sparrow-runtime/src/execute/operation/expression_executor.rs index c7d83b64b..75d1f4c28 100644 --- a/crates/sparrow-runtime/src/execute/operation/expression_executor.rs +++ b/crates/sparrow-runtime/src/execute/operation/expression_executor.rs @@ -10,11 +10,11 @@ use itertools::Itertools; use sparrow_api::kaskada::v1alpha::expression_plan::Operator; use sparrow_api::kaskada::v1alpha::{ExpressionPlan, LateBoundValue, OperationInputRef}; use sparrow_arrow::scalar_value::ScalarValue; +use sparrow_instructions::ValueRef; use sparrow_instructions::{ - create_evaluator, ColumnarValue, ComputeStore, Evaluator, GroupingIndices, RuntimeInfo, - StaticArg, StaticInfo, StoreKey, + create_evaluator, ColumnarValue, ComputeStore, Evaluator, GroupingIndices, InstKind, InstOp, + RuntimeInfo, StaticArg, StaticInfo, StoreKey, }; -use sparrow_plan::ValueRef; use crate::execute::operation::InputBatch; use crate::Batch; @@ -96,14 +96,14 @@ impl ExpressionExecutor { // TODO: This could probably be cleaned up, possibly by eliminating // `inst kind` entirely. let inst_kind = if inst == "field_ref" { - sparrow_plan::InstKind::FieldRef + InstKind::FieldRef } else if inst == "record" { - sparrow_plan::InstKind::Record + InstKind::Record } else if inst == "cast" { - sparrow_plan::InstKind::Cast(data_type.clone()) + InstKind::Cast(data_type.clone()) } else { - let inst_op = sparrow_plan::InstOp::from_str(&inst)?; - sparrow_plan::InstKind::Simple(inst_op) + let inst_op = InstOp::from_str(&inst)?; + InstKind::Simple(inst_op) }; let static_info = StaticInfo::new(&inst_kind, args, &data_type); @@ -309,10 +309,7 @@ impl WorkArea { // grouping, time, etc. We may be able to do something simpler using the // information from the operation input.
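// (As a reading aid: `value` is assumed to resolve each `ValueRef` to a `ColumnarValue`: an input column of the operation, as in the arm below, or a literal/late-bound scalar in the arms that follow.)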
impl RuntimeInfo for WorkArea { - fn value( - &self, - arg: &sparrow_plan::ValueRef, - ) -> anyhow::Result<ColumnarValue> { + fn value(&self, arg: &ValueRef) -> anyhow::Result<ColumnarValue> { match arg { ValueRef::Input(index) => Ok(ColumnarValue::Array( self.input_columns[*index as usize].clone(), @@ -332,11 +329,11 @@ impl RuntimeInfo for WorkArea { &self.grouping } - fn time_column(&self) -> sparrow_instructions::ColumnarValue { + fn time_column(&self) -> ColumnarValue { ColumnarValue::Array(self.time.clone()) } - fn storage(&self) -> Option<&sparrow_instructions::ComputeStore> { + fn storage(&self) -> Option<&ComputeStore> { todo!() } diff --git a/crates/sparrow-runtime/src/execute/operation/input_batch.rs b/crates/sparrow-runtime/src/execute/operation/input_batch.rs index b08c9e98b..4fca81aa2 100644 --- a/crates/sparrow-runtime/src/execute/operation/input_batch.rs +++ b/crates/sparrow-runtime/src/execute/operation/input_batch.rs @@ -7,7 +7,7 @@ use arrow::record_batch::RecordBatch; use sparrow_core::{KeyTriple, KeyTriples}; use sparrow_instructions::GroupingIndices; -use crate::merge::BinaryMergeInput; +use sparrow_merge::old::BinaryMergeInput; /// The input information an operation is running on. /// diff --git a/crates/sparrow-runtime/src/execute/operation/lookup_request.rs b/crates/sparrow-runtime/src/execute/operation/lookup_request.rs index 53fcb1735..af8010446 100644 --- a/crates/sparrow-runtime/src/execute/operation/lookup_request.rs +++ b/crates/sparrow-runtime/src/execute/operation/lookup_request.rs @@ -137,7 +137,8 @@ impl LookupRequestOperation { // Re-key to the foreign key and hash it. let foreign_key_hash = input.column(self.foreign_key_column).clone(); - let foreign_key_hash = sparrow_arrow::hash::hash(&foreign_key_hash)?; + let foreign_key_hash = + sparrow_arrow::hash::hash(&foreign_key_hash).map_err(|e| e.into_error())?; // Now, we need to determine how many actual foreign rows there are and collect // the requesting primary keys. @@ -289,9 +290,9 @@ mod tests { "#; insta::assert_snapshot!(run_test(input_json).await, @r###" - {"_key_hash":2359047937476779835,"_subsort":0,"_time":"1970-01-01T00:00:00.000002","e0":[1]} - {"_key_hash":10021492687541564645,"_subsort":1,"_time":"1970-01-01T00:00:00.000003","e0":[1]} - {"_key_hash":14956259290599888306,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[1,2]} + {"_key_hash":18433805721903975440,"_subsort":0,"_time":"1970-01-01T00:00:00.000002","e0":[1]} + {"_key_hash":16461383214845928621,"_subsort":1,"_time":"1970-01-01T00:00:00.000003","e0":[1]} + {"_key_hash":5496774745203840792,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[1,2]} "###) } @@ -307,10 +308,10 @@ mod tests { // This tests that multiple rows at the same time subsort (but different key // hash) map to unique foreign rows, but in the same order.
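// (The expected `_key_hash` values in these snapshots changed, presumably because `sparrow_arrow::hash::hash` was updated along with its new error-returning signature; the row structure itself is unchanged.)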
insta::assert_snapshot!(run_test(input_json).await, @r###" - {"_key_hash":2359047937476779835,"_subsort":0,"_time":"1970-01-01T00:00:00.000002","e0":[1]} - {"_key_hash":10021492687541564645,"_subsort":1,"_time":"1970-01-01T00:00:00.000003","e0":[1]} - {"_key_hash":1575016611515860288,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[2]} - {"_key_hash":14956259290599888306,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[1]} + {"_key_hash":18433805721903975440,"_subsort":0,"_time":"1970-01-01T00:00:00.000002","e0":[1]} + {"_key_hash":16461383214845928621,"_subsort":1,"_time":"1970-01-01T00:00:00.000003","e0":[1]} + {"_key_hash":2694864431690786590,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[2]} + {"_key_hash":5496774745203840792,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[1]} "###) } @@ -326,10 +327,10 @@ mod tests { // This tests that multiple rows at the same time subsort (but different key // hash) map to unique foreign rows, but in a different order. insta::assert_snapshot!(run_test(input_json).await, @r###" - {"_key_hash":2359047937476779835,"_subsort":0,"_time":"1970-01-01T00:00:00.000002","e0":[1]} - {"_key_hash":10021492687541564645,"_subsort":1,"_time":"1970-01-01T00:00:00.000003","e0":[1]} - {"_key_hash":1575016611515860288,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[1]} - {"_key_hash":14956259290599888306,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[2]} + {"_key_hash":18433805721903975440,"_subsort":0,"_time":"1970-01-01T00:00:00.000002","e0":[1]} + {"_key_hash":16461383214845928621,"_subsort":1,"_time":"1970-01-01T00:00:00.000003","e0":[1]} + {"_key_hash":2694864431690786590,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[1]} + {"_key_hash":5496774745203840792,"_subsort":0,"_time":"1970-01-01T00:00:00.000004","e0":[2]} "###) } @@ -345,10 +346,14 @@ mod tests { expressions: vec![ExpressionPlan { arguments: vec![], result_type: Some(v1alpha::DataType { - kind: Some(data_type::Kind::List(Box::new(v1alpha::DataType { - kind: Some(data_type::Kind::Primitive( - data_type::PrimitiveType::U64 as i32, - )), + kind: Some(data_type::Kind::List(Box::new(data_type::List { + name: "item".to_owned(), + item_type: Some(Box::new(v1alpha::DataType { + kind: Some(data_type::Kind::Primitive( + data_type::PrimitiveType::U64 as i32, + )), + })), + nullable: true, }))), }), output: true, diff --git a/crates/sparrow-runtime/src/execute/operation/lookup_response.rs b/crates/sparrow-runtime/src/execute/operation/lookup_response.rs index f0b9949d2..854c7709a 100644 --- a/crates/sparrow-runtime/src/execute/operation/lookup_response.rs +++ b/crates/sparrow-runtime/src/execute/operation/lookup_response.rs @@ -1,13 +1,13 @@ use std::sync::Arc; use anyhow::Context; -use arrow::array::{Array, ListArray, UInt32Array, UInt64Array}; +use arrow::array::{Array, AsArray, ListArray, UInt32Array, UInt64Array}; use async_trait::async_trait; use error_stack::{IntoReport, IntoReportCompat, ResultExt}; use futures::StreamExt; use itertools::Itertools; use sparrow_api::kaskada::v1alpha::operation_plan; -use sparrow_arrow::downcast::{downcast_list_array, downcast_primitive_array}; +use sparrow_arrow::downcast::downcast_primitive_array; use sparrow_instructions::ComputeStore; use tokio_stream::wrappers::ReceiverStream; @@ -111,8 +111,7 @@ impl LookupResponseOperation { return Ok(None); } - let requesting_key_hash_list: &ListArray = 
requesting_key_hash_list.as_ref().as_list(); let requesting_key_hashes = requesting_key_hash_list.values(); let requesting_key_hashes: &UInt64Array = downcast_primitive_array(requesting_key_hashes.as_ref())?; diff --git a/crates/sparrow-runtime/src/execute/operation/merge.rs b/crates/sparrow-runtime/src/execute/operation/merge.rs index d70d2ebf1..a3560f9c0 100644 --- a/crates/sparrow-runtime/src/execute/operation/merge.rs +++ b/crates/sparrow-runtime/src/execute/operation/merge.rs @@ -1,5 +1,12 @@ use std::sync::Arc; +use super::BoxedOperation; +use crate::execute::operation::expression_executor::InputColumn; +use crate::execute::operation::spread::Spread; +use crate::execute::operation::{InputBatch, Operation}; +use crate::execute::Error; +use crate::key_hash_index::KeyHashIndex; +use crate::Batch; use anyhow::Context; use arrow::array::{ArrayRef, BooleanArray}; use arrow::datatypes::DataType; @@ -13,17 +20,9 @@ use sparrow_api::kaskada::v1alpha::operation_plan; use sparrow_arrow::downcast::downcast_primitive_array; use sparrow_core::{KeyTriple, KeyTriples}; use sparrow_instructions::{ComputeStore, GroupingIndices, StoreKey}; +use sparrow_merge::old::{binary_merge, BinaryMergeInput}; use tokio_stream::wrappers::ReceiverStream; -use super::BoxedOperation; -use crate::execute::operation::expression_executor::InputColumn; -use crate::execute::operation::spread::Spread; -use crate::execute::operation::{InputBatch, Operation}; -use crate::execute::Error; -use crate::key_hash_index::KeyHashIndex; -use crate::merge::{binary_merge, BinaryMergeInput}; -use crate::Batch; - #[derive(Debug)] pub(super) struct MergeOperation { /// Indices (and side) of columns needed within the merge operation. diff --git a/crates/sparrow-runtime/src/execute/operation/scan.rs b/crates/sparrow-runtime/src/execute/operation/scan.rs index 0e66f7354..86fc92d5f 100644 --- a/crates/sparrow-runtime/src/execute/operation/scan.rs +++ b/crates/sparrow-runtime/src/execute/operation/scan.rs @@ -10,7 +10,7 @@ use itertools::Itertools; use sparrow_api::kaskada::v1alpha::{self, operation_input_ref, operation_plan}; use sparrow_arrow::downcast::downcast_primitive_array; use sparrow_instructions::ComputeStore; -use sparrow_plan::TableId; +use sparrow_instructions::TableId; use sparrow_qfr::FlightRecorder; use super::BoxedOperation; @@ -157,6 +157,59 @@ impl ScanOperation { ))?, ); + if let Some(in_memory) = &table_info.in_memory { + // Hacky: when running via the Python builder, use the in-memory batch. + // Ideally, this would be merged with the contents of the file. + // Bonus points if it deduplicates. That would allow us to use the + // in-memory batch as the "hot-store" for history+stream hybrid + // queries. + assert!(requested_slice.is_none()); + + // TODO: Consider stoppable batch scans (and queries). + let input_stream = if context.materialize { + in_memory + .subscribe() + .map_err(|e| e.change_context(Error::internal_msg("invalid input"))) + .and_then(|batch| async move { + Batch::try_new_from_batch(batch) + .into_report() + .change_context(Error::internal_msg("invalid input")) + }) + // TODO: Share this code / unify it with other scans. 
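+ // `take_until` ends the stream once the future below completes: the future watches the stop signal and resolves when it flips to true (or the sender is dropped), so a materialization keeps consuming new batches until explicitly stopped.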
+ .take_until(async move { + let mut stop_signal_rx = + stop_signal_rx.expect("stop signal for use with materialization"); + while !*stop_signal_rx.borrow() { + match stop_signal_rx.changed().await { + Ok(_) => (), + Err(e) => { + tracing::error!( + "stop signal receiver dropped unexpectedly: {:?}", + e + ); + break; + } + } + } + }) + .boxed() + } else { + let batch = in_memory.current(); + futures::stream::once(async move { + Batch::try_new_from_batch(batch) + .into_report() + .change_context(Error::internal_msg("invalid input")) + }) + .boxed() + }; + return Ok(Box::new(Self { + projected_schema, + input_stream, + key_hash_index: KeyHashIndex::default(), + progress_updates_tx: context.progress_updates_tx.clone(), + })); + } + // Figure out the projected columns from the table schema. // // TODO: Can we clean anything up by changing the table reader API @@ -332,16 +385,16 @@ mod tests { use sparrow_api::kaskada::v1alpha::{self, data_type}; use sparrow_api::kaskada::v1alpha::{ expression_plan, literal, operation_plan, ComputePlan, ComputeTable, ExpressionPlan, - Literal, OperationInputRef, OperationPlan, PlanHash, PreparedFile, SlicePlan, TableConfig, + Literal, OperationInputRef, OperationPlan, PreparedFile, SlicePlan, TableConfig, TableMetadata, }; use sparrow_arrow::downcast::downcast_primitive_array; use sparrow_compiler::DataContext; use uuid::Uuid; - use crate::execute::key_hash_inverse::{KeyHashInverse, ThreadSafeKeyHashInverse}; use crate::execute::operation::testing::batches_to_csv; use crate::execute::operation::{OperationContext, OperationExecutor}; + use crate::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::read::testing::write_parquet_file; use crate::stores::ObjectStoreRegistry; @@ -449,8 +502,7 @@ mod tests { })), }; - let key_hash_inverse = KeyHashInverse::from_data_type(DataType::Utf8); - let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::new(key_hash_inverse)); + let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::from_data_type(&DataType::Utf8)); let (max_event_tx, mut max_event_rx) = tokio::sync::mpsc::unbounded_channel(); let (sender, receiver) = tokio::sync::mpsc::channel(10); @@ -464,7 +516,6 @@ mod tests { operations: vec![plan], ..ComputePlan::default() }, - plan_hash: PlanHash::default(), object_stores: Arc::new(ObjectStoreRegistry::default()), data_context, compute_store: None, @@ -473,6 +524,7 @@ mod tests { progress_updates_tx, output_at_time: None, bounded_lateness_ns: None, + materialize: false, }; executor diff --git a/crates/sparrow-runtime/src/execute/operation/shift_to.rs b/crates/sparrow-runtime/src/execute/operation/shift_to.rs index ce22da3d7..4ae0177eb 100644 --- a/crates/sparrow-runtime/src/execute/operation/shift_to.rs +++ b/crates/sparrow-runtime/src/execute/operation/shift_to.rs @@ -430,7 +430,7 @@ impl ShiftToColumnOperation { self.pending = Some(if let Some(right) = self.pending.take() { let left = input; let merge_result = - crate::merge::binary_merge(left.as_merge_input()?, right.as_merge_input()?)?; + sparrow_merge::old::binary_merge(left.as_merge_input()?, right.as_merge_input()?)?; // TODO: Binary merge optimization opportunity. 
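// (Context, inferred from usage rather than shown here: `binary_merge` takes two time-ordered inputs and is assumed to produce the merged ordering plus per-side take indices used to realign each side's columns.)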
// HACK: We'd like the binary merge to return a boolean array `BooleanArray` diff --git a/crates/sparrow-runtime/src/execute/operation/spread.rs b/crates/sparrow-runtime/src/execute/operation/spread.rs index ce6467ef9..0ca1a35f3 100644 --- a/crates/sparrow-runtime/src/execute/operation/spread.rs +++ b/crates/sparrow-runtime/src/execute/operation/spread.rs @@ -4,15 +4,14 @@ use std::sync::Arc; use anyhow::Context; use arrow::array::{ new_null_array, Array, ArrayData, ArrayRef, BooleanArray, BooleanBufferBuilder, - GenericStringArray, GenericStringBuilder, Int32BufferBuilder, ListArray, MapArray, - OffsetSizeTrait, PrimitiveArray, PrimitiveBuilder, StringArray, StringBuilder, StructArray, + GenericStringArray, GenericStringBuilder, Int32Builder, OffsetSizeTrait, PrimitiveArray, + PrimitiveBuilder, StructArray, UInt32Array, UInt32Builder, }; use arrow::datatypes::{self, ArrowPrimitiveType, DataType, Fields}; use bitvec::vec::BitVec; use itertools::{izip, Itertools}; use sparrow_arrow::downcast::{ - downcast_boolean_array, downcast_list_array, downcast_map_array, downcast_primitive_array, - downcast_string_array, downcast_struct_array, + downcast_boolean_array, downcast_primitive_array, downcast_string_array, downcast_struct_array, }; use sparrow_arrow::utils::make_null_array; use sparrow_instructions::GroupingIndices; @@ -43,7 +42,7 @@ impl<'de> serde::Deserialize<'de> for Spread { let e = SerializedSpread::deserialize(deserializer)?; let Some(spread_impl) = e.into_spread_impl() else { use serde::de::Error; - return Err(D::Error::custom("expected owned")) + return Err(D::Error::custom("expected owned")); }; Ok(Self { spread_impl }) @@ -167,10 +166,10 @@ enum SerializedSpread<'a> { UnlatchedString(Boo<'a, UnlatchedStringSpread>), LatchedLargeString(Boo<'a, LatchedStringSpread>), UnlatchedLargeString(Boo<'a, UnlatchedStringSpread>), - UnlatchedUInt64List(Boo<'a, UnlatchedUInt64ListSpread>), - UnlatchedMap(Boo<'a, UnlatchedMapSpread>), LatchedStruct(Boo<'a, StructSpread>), UnlatchedStruct(Boo<'a, StructSpread>), + LatchedFallback(Boo<'a, LatchedFallbackSpread>), + UnlatchedFallback(Boo<'a, UnlatchedFallbackSpread>), } fn into_spread_impl(spread: Boo<'_, T>) -> Option> { @@ -246,10 +245,10 @@ impl<'a> SerializedSpread<'a> { SerializedSpread::UnlatchedString(spread) => into_spread_impl(spread), SerializedSpread::LatchedLargeString(spread) => into_spread_impl(spread), SerializedSpread::UnlatchedLargeString(spread) => into_spread_impl(spread), - SerializedSpread::UnlatchedUInt64List(spread) => into_spread_impl(spread), - SerializedSpread::UnlatchedMap(spread) => into_spread_impl(spread), SerializedSpread::LatchedStruct(spread) => into_spread_impl(spread), SerializedSpread::UnlatchedStruct(spread) => into_spread_impl(spread), + SerializedSpread::LatchedFallback(spread) => into_spread_impl(spread), + SerializedSpread::UnlatchedFallback(spread) => into_spread_impl(spread), } } } @@ -347,23 +346,13 @@ impl Spread { Box::new(StructSpread::try_new_unlatched(fields)?) 
} } - DataType::Map(_, _) => { - anyhow::ensure!(!latched, "Latched map spread not supported"); - Box::new(UnlatchedMapSpread) - } - DataType::List(field) => { - anyhow::ensure!(!latched, "Latched list spread not supported"); - anyhow::ensure!( - field.data_type() == &DataType::UInt64, - "Unsupported type {:?} for list spread", - field.data_type() - ); - Box::new(UnlatchedUInt64ListSpread) + fallback => { + if latched { + Box::new(LatchedFallbackSpread::new(fallback)) + } else { + Box::new(UnlatchedFallbackSpread) + } } - unsupported => anyhow::bail!( - "Unsupported type for spread: {:?} (latched = {latched})", - unsupported - ), }; Ok(Self { spread_impl }) @@ -1262,10 +1251,10 @@ where values: &ArrayRef, signal: &BooleanArray, ) -> anyhow::Result<ArrayRef> { - let values: &StringArray = downcast_string_array(values.as_ref())?; + let values: &GenericStringArray<O> = downcast_string_array(values.as_ref())?; let mut values = values.iter(); - let mut builder = StringBuilder::with_capacity(grouping.len(), 1024); + let mut builder = GenericStringBuilder::<O>::with_capacity(grouping.len(), 1024); for signal in signal.iter() { match signal { Some(true) => builder.append_option(values.next().context("missing value")?), @@ -1288,9 +1277,9 @@ where fn spread_false( &mut self, grouping: &GroupingIndices, - _value_type: &DataType, + value_type: &DataType, ) -> anyhow::Result<ArrayRef> { - Ok(new_null_array(&DataType::Utf8, grouping.len())) + Ok(new_null_array(value_type, grouping.len())) } } @@ -1676,61 +1665,33 @@ pub(super) fn bit_run_iterator( } #[derive(serde::Serialize, serde::Deserialize, Debug)] -struct UnlatchedUInt64ListSpread; +struct UnlatchedFallbackSpread; -impl ToSerializedSpread for UnlatchedUInt64ListSpread { +impl ToSerializedSpread for UnlatchedFallbackSpread { fn to_serialized_spread(&self) -> SerializedSpread<'_> { - SerializedSpread::UnlatchedUInt64List(Boo::Borrowed(self)) + SerializedSpread::UnlatchedFallback(Boo::Borrowed(self)) } } -impl SpreadImpl for UnlatchedUInt64ListSpread { +impl SpreadImpl for UnlatchedFallbackSpread { fn spread_signaled( &mut self, grouping: &GroupingIndices, values: &ArrayRef, signal: &BooleanArray, ) -> anyhow::Result<ArrayRef> { - // This is a little tricky. Since we're an unlatched spread, we don't - // need to spread the underlying values (each item in the list will be - // referenced exactly once). Instead, we need to spread out the offset - // array. - - let values = downcast_list_array(values.as_ref())?; - let mut offset_builder = Int32BufferBuilder::new(grouping.len() + 1); - - let mut null_builder = BooleanBufferBuilder::new(grouping.len()); - - // Ensure the buffers are aligned to the offset. 
- offset_builder.append_n_zeroed(values.offset()); - null_builder.append_n(values.offset(), false); - - let mut offset_iter = values.value_offsets().iter(); - let mut offset = *offset_iter.next().context("missing offset")?; - offset_builder.append(offset); - - let mut index = 0; + let mut indices = Int32Builder::with_capacity(grouping.len()); + let mut next_index = 0; for signal in signal.iter() { - if matches!(signal, Some(true)) { - offset = *offset_iter.next().context("missing offset")?; - null_builder.append(values.is_valid(index)); - index += 1; + if signal.unwrap_or(false) { + indices.append_value(next_index); + next_index += 1; } else { - null_builder.append(false); + indices.append_null(); } - offset_builder.append(offset); } - - let data_builder = values.to_data().into_builder(); - let offset = offset_builder.finish(); - let array_data = data_builder - .len(grouping.len()) - .null_bit_buffer(Some(null_builder.finish().into_inner())) - .buffers(vec![offset]) - .build()?; - let result = ListArray::from(array_data); - - Ok(Arc::new(result)) + let indices = indices.finish(); + arrow::compute::take(values.as_ref(), &indices, None).context("failed to take values") } fn spread_true( @@ -1752,56 +1713,68 @@ impl SpreadImpl for UnlatchedUInt64ListSpread { } #[derive(serde::Serialize, serde::Deserialize, Debug)] -struct UnlatchedMapSpread; +struct LatchedFallbackSpread { + // The index of the group is used as the index in the data. + #[serde(with = "sparrow_arrow::serde::array_ref")] + data: ArrayRef, +} + +impl LatchedFallbackSpread { + fn new(data_type: &DataType) -> Self { + // TODO: Upgrade to arrow>=45 and this can be `make_empty(data_type)` + let data = ArrayData::new_empty(data_type); + let data = arrow::array::make_array(data); + + Self { data } + } +} -impl ToSerializedSpread for UnlatchedMapSpread { +impl ToSerializedSpread for LatchedFallbackSpread { fn to_serialized_spread(&self) -> SerializedSpread<'_> { - SerializedSpread::UnlatchedMap(Boo::Borrowed(self)) + SerializedSpread::LatchedFallback(Boo::Borrowed(self)) } } -impl SpreadImpl for UnlatchedMapSpread { +impl SpreadImpl for LatchedFallbackSpread { fn spread_signaled( &mut self, grouping: &GroupingIndices, values: &ArrayRef, signal: &BooleanArray, ) -> anyhow::Result { - let map_values = downcast_map_array(values.as_ref())?; - - let mut offset_builder = Int32BufferBuilder::new(grouping.len() + 1); - let mut null_builder = BooleanBufferBuilder::new(grouping.len()); - - // Ensure the buffers are aligned to the offset. - offset_builder.append_n_zeroed(values.offset()); - null_builder.append_n(values.offset(), false); - - let mut offset_iter = map_values.value_offsets().iter(); - let mut offset = *offset_iter.next().context("missing offset")?; - offset_builder.append(offset); + debug_assert_eq!(grouping.len(), signal.len()); + anyhow::ensure!(self.data.len() <= grouping.num_groups()); + + // TODO: We could do this using a separate null buffer and value buffer. + // This would allow us to avoid copying the data from this vector to the + // data buffers for `take`. 
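+ // Index space for the takes below: rows [0, self.data.len()) of the virtual array are the retained per-group state and rows [self.data.len(), ..) are the incoming `values`; `concat_take` is assumed to behave like `take` over the concatenation of the two arrays.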
+ let mut state_take_indices: Vec<Option<u32>> = (0..grouping.num_groups()) + .map(|index| { + if index < self.data.len() { + Some(index as u32) + } else { + None + } + }) + .collect(); - let mut index = 0; - for signal in signal.iter() { - if matches!(signal, Some(true)) { - offset = *offset_iter.next().context("missing offset")?; - null_builder.append(values.is_valid(index)); - index += 1; + let mut indices = UInt32Builder::with_capacity(grouping.len()); + let mut next_index = self.data.len() as u32; + for (signal, group) in signal.iter().zip(grouping.group_iter()) { + if signal.unwrap_or(false) { + indices.append_value(next_index); + state_take_indices[group] = Some(next_index); + next_index += 1; } else { - null_builder.append(false); + indices.append_option(state_take_indices[group]); } - offset_builder.append(offset); } + let indices = indices.finish(); - let data_builder = values.to_data().into_builder(); - let offset = offset_builder.finish(); - let array_data = data_builder - .len(grouping.len()) - .null_bit_buffer(Some(null_builder.finish().into_inner())) - .buffers(vec![offset]) - .build()?; - let result = MapArray::from(array_data); - - Ok(Arc::new(result)) + let state_take_indices = UInt32Array::from(state_take_indices); + let result = sparrow_arrow::concat_take(&self.data, values, &indices)?; + self.data = sparrow_arrow::concat_take(&self.data, values, &state_take_indices)?; + Ok(result) } fn spread_true( @@ -1810,15 +1783,40 @@ impl SpreadImpl for UnlatchedMapSpread { values: &ArrayRef, ) -> anyhow::Result<ArrayRef> { anyhow::ensure!(grouping.len() == values.len()); + anyhow::ensure!(self.data.len() <= grouping.num_groups()); + + // TODO: We could do this using a separate null buffer and value buffer. + // This would allow us to avoid copying the data from this vector to the + // data buffers for `take`. + let mut state_take_indices: Vec<Option<u32>> = (0..grouping.num_groups()) + .map(|index| { + if index < self.data.len() { + Some(index as u32) + } else { + None + } + }) + .collect(); + + // This will update the new_state_indices to the last value for each group. + // This will null-out the data if the value is null at that point, so we + // don't need to handle that case specially. 
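+ // Worked example: with 2 retained state rows and incoming groups [0, 1, 0], the loop writes group 0 -> 0+2=2, group 1 -> 1+2=3, then overwrites group 0 -> 2+2=4, so each group ends pointing at its last value.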
+ for (index, group) in grouping.group_iter().enumerate() { + state_take_indices[group] = Some((index + self.data.len()) as u32) + } + let state_take_indices = UInt32Array::from(state_take_indices); + self.data = sparrow_arrow::concat_take(&self.data, values, &state_take_indices)?; + Ok(values.clone()) } fn spread_false( &mut self, grouping: &GroupingIndices, - value_type: &DataType, + _value_type: &DataType, ) -> anyhow::Result { - Ok(new_null_array(value_type, grouping.len())) + arrow::compute::take(self.data.as_ref(), grouping.group_indices(), None) + .context("failed to take values") } } @@ -2209,6 +2207,44 @@ mod tests { ); } + #[test] + fn test_large_string_unlatched() { + let nums = LargeStringArray::from(vec![ + Some("5"), + Some("8"), + None, + Some("10"), + None, + Some("12"), + ]); + let result = run_spread( + Arc::new(nums), + vec![0, 1, 2, 0, 1, 2, 0, 1, 2, 0, 0], + vec![ + false, true, false, true, false, true, false, true, false, true, false, + ], + false, + ); + let result: &LargeStringArray = downcast_string_array(result.as_ref()).unwrap(); + + assert_eq!( + result, + &LargeStringArray::from(vec![ + None, + Some("5"), + None, + Some("8"), + None, + None, + None, + Some("10"), + None, + None, + None + ]) + ); + } + #[test] fn test_unlatched_uint64_list_spread() { let data = vec![ @@ -2242,6 +2278,39 @@ mod tests { assert_eq!(&result, &expected) } + #[test] + fn test_latched_uint64_list_spread() { + let data = vec![ + Some(vec![]), + None, + Some(vec![Some(3), Some(5), Some(19)]), + Some(vec![Some(6)]), + ]; + let list_array = ListArray::from_iter_primitive::(data); + + let result = run_spread( + Arc::new(list_array), + vec![0, 1, 0, 0, 0, 0, 0, 0], + vec![true, false, false, true, false, true, false, true], + true, + ); + + let expected = vec![ + Some(vec![]), + None, + Some(vec![]), + None, + None, + Some(vec![Some(3), Some(5), Some(19)]), + Some(vec![Some(3), Some(5), Some(19)]), + Some(vec![Some(6)]), + ]; + let expected = ListArray::from_iter_primitive::(expected); + + let expected: ArrayRef = Arc::new(expected); + assert_eq!(&result, &expected) + } + #[test] fn test_unlatched_uint64_list_spread_sliced() { let data = vec![ diff --git a/crates/sparrow-runtime/src/execute/operation/testing.rs b/crates/sparrow-runtime/src/execute/operation/testing.rs index fc619808b..be209ecd5 100644 --- a/crates/sparrow-runtime/src/execute/operation/testing.rs +++ b/crates/sparrow-runtime/src/execute/operation/testing.rs @@ -4,11 +4,11 @@ use anyhow::Context; use arrow::datatypes::{DataType, Field, Schema, TimeUnit}; use arrow::record_batch::RecordBatch; use itertools::Itertools; -use sparrow_api::kaskada::v1alpha::{ComputePlan, OperationPlan, PlanHash}; +use sparrow_api::kaskada::v1alpha::{ComputePlan, OperationPlan}; use sparrow_compiler::DataContext; -use crate::execute::key_hash_inverse::{KeyHashInverse, ThreadSafeKeyHashInverse}; use crate::execute::operation::{OperationContext, OperationExecutor}; +use crate::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::stores::ObjectStoreRegistry; use crate::Batch; @@ -173,15 +173,13 @@ pub(super) async fn run_operation( // Channel for the output stats. 
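// (The capacity of 29 mirrors the `29.max(...)` sizing used on the real execution path; the receiver is dropped here since these test harnesses don't inspect progress updates.)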
let (progress_updates_tx, _) = tokio::sync::mpsc::channel(29); - let key_hash_inverse = KeyHashInverse::from_data_type(DataType::Utf8); - let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::new(key_hash_inverse)); + let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::from_data_type(&DataType::Utf8)); let mut context = OperationContext { plan: ComputePlan { operations: vec![plan], ..ComputePlan::default() }, - plan_hash: PlanHash::default(), object_stores: Arc::new(ObjectStoreRegistry::default()), data_context: DataContext::default(), compute_store: None, @@ -190,6 +188,7 @@ pub(super) async fn run_operation( progress_updates_tx, output_at_time: None, bounded_lateness_ns: None, + materialize: false, }; executor .execute( @@ -223,8 +222,7 @@ pub(super) async fn run_operation_json( inputs.push(receiver); } - let key_hash_inverse = KeyHashInverse::from_data_type(DataType::Utf8); - let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::new(key_hash_inverse)); + let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::from_data_type(&DataType::Utf8)); let (max_event_tx, mut max_event_rx) = tokio::sync::mpsc::unbounded_channel(); @@ -239,7 +237,6 @@ pub(super) async fn run_operation_json( operations: vec![plan], ..ComputePlan::default() }, - plan_hash: PlanHash::default(), object_stores: Arc::new(ObjectStoreRegistry::default()), data_context: DataContext::default(), compute_store: None, @@ -248,6 +245,7 @@ pub(super) async fn run_operation_json( progress_updates_tx, output_at_time: None, bounded_lateness_ns: None, + materialize: false, }; executor .execute( diff --git a/crates/sparrow-runtime/src/execute/operation/tick.rs b/crates/sparrow-runtime/src/execute/operation/tick.rs index 367285d86..23d71a26d 100644 --- a/crates/sparrow-runtime/src/execute/operation/tick.rs +++ b/crates/sparrow-runtime/src/execute/operation/tick.rs @@ -453,8 +453,10 @@ async fn send_tick_batch( // The subsort value is set to `u64::MAX` in order to ensure ticks are // processed after all other rows at the same time. let subsort_column = - // SAFETY: We create the iterator with a known / fixed length. + + // SAFETY: We create the iterator with a known / fixed length. unsafe { UInt64Array::from_trusted_len_iter(std::iter::repeat(Some(u64::MAX)).take(len)) }; + let subsort_column: ArrayRef = Arc::new(subsort_column); // Create a tick column consisting of booleans set to `true`. 
diff --git a/crates/sparrow-runtime/src/execute/operation/with_key.rs b/crates/sparrow-runtime/src/execute/operation/with_key.rs index 9b50762c4..5dcb2093f 100644 --- a/crates/sparrow-runtime/src/execute/operation/with_key.rs +++ b/crates/sparrow-runtime/src/execute/operation/with_key.rs @@ -2,10 +2,10 @@ use std::sync::Arc; use super::BoxedOperation; use crate::execute::error::{invalid_operation, Error}; -use crate::execute::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::execute::operation::expression_executor::InputColumn; use crate::execute::operation::single_consumer_helper::SingleConsumerHelper; use crate::execute::operation::{InputBatch, Operation, OperationContext}; +use crate::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::Batch; use anyhow::Context; use async_trait::async_trait; @@ -104,7 +104,7 @@ impl WithKeyOperation { // Hash the new key column let new_keys = input.column(self.new_key_input_index); - let new_key_hashes = sparrow_arrow::hash::hash(new_keys)?; + let new_key_hashes = sparrow_arrow::hash::hash(new_keys).map_err(|e| e.into_error())?; let time = input.column(0); let subsort = input.column(1); @@ -115,8 +115,9 @@ impl WithKeyOperation { // primary grouping to produce the key hash inverse for output. if self.is_primary_grouping { self.key_hash_inverse - .add(new_keys.to_owned(), &new_key_hashes) - .await?; + .add(new_keys.as_ref(), &new_key_hashes) + .await + .map_err(|e| e.into_error())?; } // Get the take indices, which will allow us to get the requested columns from @@ -234,10 +235,10 @@ mod tests { .unwrap(); insta::assert_snapshot!(run_operation(vec![input], plan).await.unwrap(), @r###" _time,_subsort,_key_hash,e2,e3 - 1970-01-01T00:00:00.000002000,0,16001504133914743519,0.2,1.2 - 1970-01-01T00:00:00.000003000,0,8744336087600879417,2.0,4.0 - 1970-01-01T00:00:00.000004000,0,16001504133914743519,3.2,6.2 - 1970-01-01T00:00:00.000005000,0,16001504133914743519,2.1,6.1 + 1970-01-01T00:00:00.000002000,0,11333881584776451256,0.2,1.2 + 1970-01-01T00:00:00.000003000,0,4285267486210181199,2.0,4.0 + 1970-01-01T00:00:00.000004000,0,11333881584776451256,3.2,6.2 + 1970-01-01T00:00:00.000005000,0,11333881584776451256,2.1,6.1 "###); } } diff --git a/crates/sparrow-runtime/src/execute/output.rs b/crates/sparrow-runtime/src/execute/output.rs index eb772bdaf..aac939595 100644 --- a/crates/sparrow-runtime/src/execute/output.rs +++ b/crates/sparrow-runtime/src/execute/output.rs @@ -8,18 +8,16 @@ use error_stack::{FutureExt as ESFutureExt, IntoReport, Result, ResultExt}; use futures::stream::BoxStream; use futures::{FutureExt, StreamExt}; use itertools::Itertools; -use sparrow_api::kaskada::v1alpha::destination::Destination; use sparrow_api::kaskada::v1alpha::execute_request::Limits; -use sparrow_api::kaskada::v1alpha::{self, data_type}; +use sparrow_api::kaskada::v1alpha::{data_type, ObjectStoreDestination, PulsarDestination}; use sparrow_arrow::downcast::{downcast_primitive_array, downcast_struct_array}; -use crate::execute::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::execute::operation::OperationContext; use crate::execute::progress_reporter::ProgressUpdate; +use crate::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::Batch; mod object_store; -mod redis; pub mod pulsar; @@ -28,11 +26,10 @@ pub enum Error { Schema { detail: String, }, - WritingToDestination { - dest_name: String, - }, + #[display(fmt = "writing to destination '{_0}'")] + WritingToDestination(&'static str), UnspecifiedDestination, - #[cfg(not(feature = "pulsar"))] + #[allow(dead_code)] 
FeatureNotEnabled { feature: String, }, } impl error_stack::Context for Error {} +/// The output destination. +/// +/// TODO: Replace the protobuf destinations with pure Rust structs. +#[derive(Debug)] +pub enum Destination { + ObjectStore(ObjectStoreDestination), + #[cfg(feature = "pulsar")] + Pulsar(PulsarDestination), + Channel(tokio::sync::mpsc::Sender<RecordBatch>), +} + +impl TryFrom<sparrow_api::kaskada::v1alpha::Destination> for Destination { + type Error = error_stack::Report<Error>; + + fn try_from( + value: sparrow_api::kaskada::v1alpha::Destination, + ) -> std::result::Result<Self, Self::Error> { + let destination = value.destination.ok_or(Error::UnspecifiedDestination)?; + match destination { + sparrow_api::kaskada::v1alpha::destination::Destination::ObjectStore(destination) => { + Ok(Destination::ObjectStore(destination)) + } + #[cfg(not(feature = "pulsar"))] + sparrow_api::kaskada::v1alpha::destination::Destination::Pulsar(_) => { + error_stack::bail!(Error::FeatureNotEnabled { + feature: "pulsar".to_owned() + }) + } + #[cfg(feature = "pulsar")] + sparrow_api::kaskada::v1alpha::destination::Destination::Pulsar(pulsar) => { + Ok(Destination::Pulsar(pulsar)) + } + } + } +} + /// Write the batches to the given output destination. pub(super) fn write( context: &OperationContext, limits: Limits, batches: BoxStream<'static, Batch>, progress_updates_tx: tokio::sync::mpsc::Sender<ProgressUpdate>, - destination: v1alpha::Destination, + destination: Destination, + max_batch_size: Option<usize>, ) -> error_stack::Result> + 'static, Error> { let sink_schema = determine_output_schema(context)?; // Clone things that need to move into the async stream. + let max_batch_size = max_batch_size.unwrap_or(usize::MAX); let sink_schema_clone = sink_schema.clone(); let key_hash_inverse = context.key_hash_inverse.clone(); let batches = async_stream::stream! 
{ @@ -74,7 +109,18 @@ pub(super) fn write( batch }; - yield post_process_batch(&sink_schema, batch, &key_hash_inverse).await; + + if batch.num_rows() > max_batch_size { + for start in (0..batch.num_rows()).step_by(max_batch_size) { + let end = (start + max_batch_size).min(batch.num_rows()); + let length = end - start; + let batch = batch.slice(start, length); + yield post_process_batch(&sink_schema, batch, &key_hash_inverse).await; + } + } else { + yield post_process_batch(&sink_schema, batch, &key_hash_inverse).await; + } + if limit_rows && remaining == 0 { break; @@ -83,9 +129,6 @@ pub(super) fn write( } .boxed(); - let destination = destination - .destination - .ok_or(Error::UnspecifiedDestination)?; match destination { Destination::ObjectStore(destination) => Ok(object_store::write( context.object_stores.clone(), @@ -94,38 +137,39 @@ pub(super) fn write( progress_updates_tx, batches, ) - .change_context(Error::WritingToDestination { - dest_name: "object_store".to_owned(), - }) + .change_context(Error::WritingToDestination("object_store")) .boxed()), - Destination::Redis(redis) => { - Ok( - redis::write(redis, sink_schema, progress_updates_tx, batches) - .change_context(Error::WritingToDestination { - dest_name: "redis".to_owned(), - }) - .boxed(), - ) - } - #[cfg(not(feature = "pulsar"))] - Destination::Pulsar(_) => { - error_stack::bail!(Error::FeatureNotEnabled { - feature: "pulsar".to_owned() - }) + Destination::Channel(channel) => { + Ok(write_to_channel(batches, channel, progress_updates_tx).boxed()) } #[cfg(feature = "pulsar")] Destination::Pulsar(pulsar) => { Ok( pulsar::write(pulsar, sink_schema, progress_updates_tx, batches) - .change_context(Error::WritingToDestination { - dest_name: "pulsar".to_owned(), - }) + .change_context(Error::WritingToDestination("pulsar")) .boxed(), ) } } } +async fn write_to_channel( + mut batches: BoxStream<'static, RecordBatch>, + channel: tokio::sync::mpsc::Sender, + _progress_updates_tx: tokio::sync::mpsc::Sender, +) -> error_stack::Result<(), Error> { + while let Some(next) = batches.next().await { + channel + .send(next) + .await + .map_err(|_e| error_stack::report!(Error::WritingToDestination("channel")))?; + + // progress_updates_tx.send(ProgressUpdate::Output { num_rows }) + } + + Ok(()) +} + /// Adds additional information to an output batch. 
async fn post_process_batch( sink_schema: &SchemaRef, diff --git a/crates/sparrow-runtime/src/execute/output/object_store.rs b/crates/sparrow-runtime/src/execute/output/object_store.rs index f94ac7995..87483399e 100644 --- a/crates/sparrow-runtime/src/execute/output/object_store.rs +++ b/crates/sparrow-runtime/src/execute/output/object_store.rs @@ -188,7 +188,7 @@ pub(super) async fn write( // Inform tracker of destination type progress_updates_tx .send(ProgressUpdate::Destination { - destination: Destination::ObjectStore(destination.clone()), + destination: Some(Destination::ObjectStore(destination.clone())), }) .await .into_report() diff --git a/crates/sparrow-runtime/src/execute/output/pulsar.rs b/crates/sparrow-runtime/src/execute/output/pulsar.rs index c92b175c9..7a68cf05c 100644 --- a/crates/sparrow-runtime/src/execute/output/pulsar.rs +++ b/crates/sparrow-runtime/src/execute/output/pulsar.rs @@ -91,9 +91,9 @@ pub(super) async fn write( // Inform tracker of output type progress_updates_tx .send(ProgressUpdate::Destination { - destination: destination::Destination::Pulsar(PulsarDestination { + destination: Some(destination::Destination::Pulsar(PulsarDestination { config: Some(pulsar.clone()), - }), + })), }) .await .into_report() diff --git a/crates/sparrow-runtime/src/execute/output/redis.rs b/crates/sparrow-runtime/src/execute/output/redis.rs deleted file mode 100644 index 34a51b9c0..000000000 --- a/crates/sparrow-runtime/src/execute/output/redis.rs +++ /dev/null @@ -1,21 +0,0 @@ -use crate::execute::progress_reporter::ProgressUpdate; -use arrow::datatypes::SchemaRef; -use arrow::record_batch::RecordBatch; -use error_stack::Result; -use futures::stream::BoxStream; -use sparrow_api::kaskada::v1alpha::RedisDestination; - -#[derive(derive_more::Display, Debug)] -#[display(fmt = "Redis Destination is unsupported")] -pub struct Error; - -impl error_stack::Context for Error {} - -pub(super) async fn write( - _redis: RedisDestination, - _schema: SchemaRef, - _progress_updates_tx: tokio::sync::mpsc::Sender, - _batches: BoxStream<'static, RecordBatch>, -) -> Result<(), Error> { - error_stack::bail!(Error); -} diff --git a/crates/sparrow-runtime/src/execute/progress_reporter.rs b/crates/sparrow-runtime/src/execute/progress_reporter.rs index d1208a90d..c01310770 100644 --- a/crates/sparrow-runtime/src/execute/progress_reporter.rs +++ b/crates/sparrow-runtime/src/execute/progress_reporter.rs @@ -43,7 +43,7 @@ struct ProgressTracker { pub(crate) enum ProgressUpdate { /// Informs the progress tracker of the output destination. Destination { - destination: destination::Destination, + destination: Option, }, /// Progress update reported for each table indicating total size. 
InputMetadata { total_num_rows: usize }, @@ -90,7 +90,7 @@ impl ProgressTracker { fn process_update(&mut self, stats: ProgressUpdate) { match stats { ProgressUpdate::Destination { destination } => { - self.destination = Some(destination); + self.destination = destination; } ProgressUpdate::InputMetadata { total_num_rows } => { self.progress.total_input_rows += total_num_rows as i64; @@ -135,18 +135,15 @@ impl ProgressTracker { flight_record_path: None, plan_yaml_path: None, compute_snapshots: Vec::new(), - destination: Some(destination), + destination, }) } - fn destination_to_output(&mut self) -> error_stack::Result { + fn destination_to_output(&mut self) -> error_stack::Result, Error> { // Clone the output paths in for object store destinations - let destination = self - .destination - .as_ref() - .ok_or(Error::Internal("expected destination"))?; - match destination { - destination::Destination::ObjectStore(store) => Ok(Destination { + match self.destination.as_ref() { + None => Ok(None), + Some(destination::Destination::ObjectStore(store)) => Ok(Some(Destination { destination: Some(destination::Destination::ObjectStore( ObjectStoreDestination { file_type: store.file_type, @@ -156,18 +153,18 @@ impl ProgressTracker { }), }, )), - }), + })), #[cfg(not(feature = "pulsar"))] - output_to::Destination::Pulsar(pulsar) => { + Some(destination::Destination::Pulsar(pulsar)) => { error_stack::bail!(Error::FeatureNotEnabled { feature: "pulsar" }) } #[cfg(feature = "pulsar")] - destination::Destination::Pulsar(pulsar) => { + Some(destination::Destination::Pulsar(pulsar)) => { let config = pulsar .config .as_ref() .ok_or(Error::internal_msg("missing config"))?; - Ok(Destination { + Ok(Some(Destination { destination: Some(destination::Destination::Pulsar(PulsarDestination { config: Some(PulsarConfig { broker_service_url: config.broker_service_url.clone(), @@ -179,10 +176,7 @@ impl ProgressTracker { admin_service_url: config.admin_service_url.clone(), }), })), - }) - } - destination::Destination::Redis(_) => { - error_stack::bail!(Error::UnsupportedOutput { output: "redis" }) + })) } } } @@ -237,7 +231,7 @@ pub(super) fn progress_stream( } } - let output = match tracker.destination_to_output() { + let destination = match tracker.destination_to_output() { Ok(output) => output, Err(e) => { yield Err(e); @@ -252,7 +246,7 @@ pub(super) fn progress_stream( flight_record_path: None, plan_yaml_path: None, compute_snapshots, - destination: Some(output), + destination, }); yield final_result; break diff --git a/crates/sparrow-runtime/src/execute/key_hash_inverse.rs b/crates/sparrow-runtime/src/key_hash_inverse.rs similarity index 65% rename from crates/sparrow-runtime/src/execute/key_hash_inverse.rs rename to crates/sparrow-runtime/src/key_hash_inverse.rs index 645ff753b..055bacf59 100644 --- a/crates/sparrow-runtime/src/execute/key_hash_inverse.rs +++ b/crates/sparrow-runtime/src/key_hash_inverse.rs @@ -1,16 +1,17 @@ use std::str::FromStr; use anyhow::Context; -use arrow::array::{Array, ArrayRef, PrimitiveArray, UInt64Array}; +use arrow::array::{Array, ArrayRef, AsArray, PrimitiveArray, UInt64Array}; use arrow::datatypes::{DataType, UInt64Type}; -use error_stack::{IntoReportCompat, ResultExt}; +use error_stack::{IntoReport, IntoReportCompat, ResultExt}; use futures::TryStreamExt; +use hashbrown::hash_map::Entry; use hashbrown::HashMap; use sparrow_arrow::downcast::downcast_primitive_array; use sparrow_compiler::DataContext; +use sparrow_instructions::GroupId; use sparrow_instructions::{ComputeStore, StoreKey}; 
-use sparrow_plan::GroupId; use crate::read::ParquetFile; use crate::stores::{ObjectStoreRegistry, ObjectStoreUrl}; @@ -21,7 +22,7 @@ use crate::stores::{ObjectStoreRegistry, ObjectStoreUrl}; /// If the entity key type is null, then all inverse keys are null. #[derive(serde::Serialize, serde::Deserialize)] pub struct KeyHashInverse { - key_hash_to_indices: HashMap<u64, usize>, + key_hash_to_indices: HashMap<u64, u64>, #[serde(with = "sparrow_arrow::serde::array_ref")] key: ArrayRef, } @@ -45,6 +46,19 @@ pub enum Error { OpeningMetadata, #[display(fmt = "failed to read metadata")] ReadingMetadata, + #[display(fmt = "key hashes contained nulls")] + KeyHashContainedNull, + #[display(fmt = "error in Arrow kernel")] + Arrow, + #[display(fmt = "key hash not registered")] + MissingKeyHash, + #[display(fmt = "key hashes and keys are of different lengths ({keys} != {key_hashes})")] + MismatchedLengths { keys: usize, key_hashes: usize }, + #[display(fmt = "incompatible key types (expected: {expected:?}, actual: {actual:?})")] + IncompatibleKeyTypes { + expected: DataType, + actual: DataType, + }, } impl error_stack::Context for Error {} @@ -68,10 +82,10 @@ impl KeyHashInverse { } /// Creates a new key hash inverse from a primary grouping data type. - pub fn from_data_type(primary_grouping_type: DataType) -> Self { + pub fn from_data_type(primary_grouping_type: &DataType) -> Self { Self { key_hash_to_indices: HashMap::new(), - key: arrow::array::new_empty_array(&primary_grouping_type), + key: arrow::array::new_empty_array(primary_grouping_type), } } @@ -109,11 +123,29 @@ impl KeyHashInverse { .into_report() .change_context(Error::ReadingMetadata)?; let entity_key_col = batch.column(1); - self.add(entity_key_col.to_owned(), hash_col) - .into_report() + self.add(entity_key_col.as_ref(), hash_col) .change_context(Error::ReadingMetadata)?; } + // HACKY: Add the in-memory batches to the key hash inverse. + let in_memory = data_context + .tables_for_grouping(primary_grouping) + .flat_map(|table| { + table.in_memory.as_ref().map(|batch| { + let batch = batch.current(); + let keys = batch + .column_by_name(&table.config().group_column_name) + .unwrap(); + let key_hashes = batch.columns()[2].clone(); + (keys.clone(), key_hashes.clone()) + }) + }); + for (keys, key_hashes) in in_memory { + self.add(keys.as_ref(), key_hashes.as_primitive()) + .change_context(Error::ReadingMetadata) + .unwrap(); + } + Ok(()) } @@ -123,30 +155,53 @@ impl KeyHashInverse { /// values are aligned to map from a key to a hash per index. The /// current implementation eagerly adds the keys and hashes to the /// inverse but can be optimized to perform the addition lazily. - fn add(&mut self, keys: ArrayRef, key_hashes: &UInt64Array) -> anyhow::Result<()> { + fn add( + &mut self, + keys: &dyn Array, + key_hashes: &UInt64Array, + ) -> error_stack::Result<(), Error> { // Since the keys map to the key hashes directly, both arrays need to be the // same length - anyhow::ensure!(keys.len() == key_hashes.len()); + error_stack::ensure!(key_hashes.null_count() == 0, Error::KeyHashContainedNull); + error_stack::ensure!( + keys.data_type() == self.key.data_type(), + Error::IncompatibleKeyTypes { + expected: self.key.data_type().clone(), + actual: keys.data_type().clone(), + } + ); + let mut len = self.key_hash_to_indices.len() as u64; + // Determine the indices that we need to add. 
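+ // Invariant maintained below: `key_hash_to_indices[hash]` is the row in `key` holding that hash's key. A vacant entry receives the next dense index (`len`), and its batch row is recorded so the new keys can be taken and concatenated onto `key`.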
let indices_from_batch: Vec = key_hashes + .values() .iter() .enumerate() .flat_map(|(index, key_hash)| { - if let Some(key_hash) = key_hash { - if !self.key_hash_to_indices.contains_key(&key_hash) { - self.key_hash_to_indices - .insert(key_hash, self.key_hash_to_indices.len()); - return Some(index as u64); + match self.key_hash_to_indices.entry(*key_hash) { + Entry::Occupied(_) => { + // Key hash is already registered. + None + } + Entry::Vacant(vacancy) => { + vacancy.insert(len); + len += 1; + Some(index as u64) } } - None }) .collect(); + debug_assert_eq!(self.key_hash_to_indices.len(), len as usize); + if !indices_from_batch.is_empty() { let indices_from_batch: PrimitiveArray = PrimitiveArray::from_iter_values(indices_from_batch); - let keys = arrow::compute::take(&keys, &indices_from_batch, None)?; + let keys = arrow_select::take::take(keys, &indices_from_batch, None) + .into_report() + .change_context(Error::Arrow)?; let concatenated_keys: Vec<_> = vec![self.key.as_ref(), keys.as_ref()]; - let concatenated_keys = arrow::compute::concat(&concatenated_keys)?; + let concatenated_keys = arrow_select::concat::concat(&concatenated_keys) + .into_report() + .change_context(Error::Arrow)?; self.key = concatenated_keys; } Ok(()) @@ -164,19 +219,20 @@ impl KeyHashInverse { /// /// If the entity key type is null, then a null array is returned of same /// length. - pub fn inverse(&self, key_hashes: &UInt64Array) -> anyhow::Result { + pub fn inverse(&self, key_hashes: &UInt64Array) -> error_stack::Result { let mut key_hash_indices: Vec = Vec::new(); - for key_hash in key_hashes { - let key_hash = key_hash.with_context(|| "unable to get key_hash")?; + for key_hash in key_hashes.values() { let key_hash_index = self .key_hash_to_indices - .get(&key_hash) - .with_context(|| "unable to find key")?; - key_hash_indices.push(*key_hash_index as u64); + .get(key_hash) + .ok_or(Error::MissingKeyHash)?; + key_hash_indices.push(*key_hash_index); } let key_hash_indices: PrimitiveArray = PrimitiveArray::from_iter_values(key_hash_indices); - let result = arrow::compute::take(&self.key, &key_hash_indices, None)?; + let result = arrow_select::take::take(&self.key, &key_hash_indices, None) + .into_report() + .change_context(Error::Arrow)?; Ok(result) } } @@ -218,10 +274,15 @@ impl ThreadSafeKeyHashInverse { } } + /// Creates a new key hash inverse from a primary grouping data type. + pub fn from_data_type(primary_grouping_type: &DataType) -> Self { + Self::new(KeyHashInverse::from_data_type(primary_grouping_type)) + } + /// Lookup keys from a key hash array. /// /// This method is thread-safe and acquires the read-lock. - pub async fn inverse(&self, key_hashes: &UInt64Array) -> anyhow::Result { + pub async fn inverse(&self, key_hashes: &UInt64Array) -> error_stack::Result { let read = self.key_map.read().await; read.inverse(key_hashes) } @@ -235,8 +296,18 @@ impl ThreadSafeKeyHashInverse { /// This method is thread safe. It acquires the read lock to check if /// any of the keys need to be added to the inverse map, and only acquires /// the write lock if needed. 
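/// (Note: the check-then-write is a benign race; two writers may both observe new keys, but `KeyHashInverse::add` deduplicates via its hash-map entries, so a double add is harmless.)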
- pub async fn add(&self, keys: ArrayRef, key_hashes: &UInt64Array) -> anyhow::Result<()> { - anyhow::ensure!(keys.len() == key_hashes.len()); + pub async fn add( + &self, + keys: &dyn Array, + key_hashes: &UInt64Array, + ) -> error_stack::Result<(), Error> { + error_stack::ensure!( + keys.len() == key_hashes.len(), + Error::MismatchedLengths { + keys: keys.len(), + key_hashes: key_hashes.len() + } + ); let has_new_keys = { let read = self.key_map.read().await; read.has_new_keys(key_hashes) @@ -250,6 +321,27 @@ impl ThreadSafeKeyHashInverse { } } + pub fn blocking_add( + &self, + keys: &dyn Array, + key_hashes: &UInt64Array, + ) -> error_stack::Result<(), Error> { + error_stack::ensure!( + keys.len() == key_hashes.len(), + Error::MismatchedLengths { + keys: keys.len(), + key_hashes: key_hashes.len() + } + ); + let has_new_keys = self.key_map.blocking_read().has_new_keys(key_hashes); + + if has_new_keys { + self.key_map.blocking_write().add(keys, key_hashes) + } else { + Ok(()) + } + } + /// Stores the KeyHashInverse to the compute store. /// /// This method is thread-safe and acquires the read-lock. @@ -267,15 +359,15 @@ mod tests { use arrow::datatypes::DataType; use sparrow_instructions::ComputeStore; - use crate::execute::key_hash_inverse::{KeyHashInverse, ThreadSafeKeyHashInverse}; + use crate::key_hash_inverse::{KeyHashInverse, ThreadSafeKeyHashInverse}; #[test] fn test_inverse_with_int32() { let keys = Arc::new(Int32Array::from(vec![100, 200])); let key_hashes = UInt64Array::from(vec![1, 2]); - let mut key_hash = KeyHashInverse::from_data_type(DataType::Int32); - key_hash.add(keys, &key_hashes).unwrap(); + let mut key_hash = KeyHashInverse::from_data_type(&DataType::Int32); + key_hash.add(keys.as_ref(), &key_hashes).unwrap(); let test_hashes = UInt64Array::from_iter_values([1, 2, 1]); let result = key_hash.inverse(&test_hashes).unwrap(); @@ -284,11 +376,11 @@ mod tests { #[test] fn test_inverse_with_string() { - let keys = Arc::new(StringArray::from(vec!["awkward", "tacos"])); + let keys = StringArray::from(vec!["awkward", "tacos"]); let key_hashes = UInt64Array::from(vec![1, 2]); - let mut key_hash = KeyHashInverse::from_data_type(DataType::Utf8); - key_hash.add(keys, &key_hashes).unwrap(); + let mut key_hash = KeyHashInverse::from_data_type(&DataType::Utf8); + key_hash.add(&keys, &key_hashes).unwrap(); let test_hashes = UInt64Array::from_iter_values([1, 2, 1]); let result = key_hash.inverse(&test_hashes).unwrap(); @@ -300,10 +392,10 @@ mod tests { #[test] fn test_has_new_keys_no_new_keys() { - let keys = Arc::new(Int32Array::from(vec![100, 200])); + let keys = Int32Array::from(vec![100, 200]); let key_hashes = UInt64Array::from(vec![1, 2]); - let mut key_hash = KeyHashInverse::from_data_type(DataType::Int32); - key_hash.add(keys, &key_hashes).unwrap(); + let mut key_hash = KeyHashInverse::from_data_type(&DataType::Int32); + key_hash.add(&keys, &key_hashes).unwrap(); let verify_key_hashes = UInt64Array::from(vec![1, 2]); assert!(!key_hash.has_new_keys(&verify_key_hashes)); @@ -311,10 +403,10 @@ mod tests { #[test] fn test_has_new_keys_some_new_keys() { - let keys = Arc::new(Int32Array::from(vec![100, 200])); + let keys = Int32Array::from(vec![100, 200]); let key_hashes = UInt64Array::from(vec![1, 2]); - let mut key_hash = KeyHashInverse::from_data_type(DataType::Int32); - key_hash.add(keys, &key_hashes).unwrap(); + let mut key_hash = KeyHashInverse::from_data_type(&DataType::Int32); + key_hash.add(&keys, &key_hashes).unwrap(); let verify_key_hashes = UInt64Array::from(vec![1, 2, 
3]); assert!(key_hash.has_new_keys(&verify_key_hashes)); @@ -322,10 +414,10 @@ mod tests { #[test] fn test_has_new_keys_all_new_keys() { - let keys = Arc::new(Int32Array::from(vec![100, 200])); + let keys = Int32Array::from(vec![100, 200]); let key_hashes = UInt64Array::from(vec![1, 2]); - let mut key_hash = KeyHashInverse::from_data_type(DataType::Int32); - key_hash.add(keys, &key_hashes).unwrap(); + let mut key_hash = KeyHashInverse::from_data_type(&DataType::Int32); + key_hash.add(&keys, &key_hashes).unwrap(); let verify_key_hashes = UInt64Array::from(vec![3, 4, 5]); assert!(key_hash.has_new_keys(&verify_key_hashes)); @@ -333,12 +425,12 @@ mod tests { #[tokio::test] async fn test_thread_safe_inverse_with_int32() { - let keys = Arc::new(Int32Array::from(vec![100, 200])); + let keys = Int32Array::from(vec![100, 200]); let key_hashes = UInt64Array::from(vec![1, 2]); - let key_hash = KeyHashInverse::from_data_type(DataType::Int32); + let key_hash = KeyHashInverse::from_data_type(&DataType::Int32); let key_hash = ThreadSafeKeyHashInverse::new(key_hash); - key_hash.add(keys, &key_hashes).await.unwrap(); + key_hash.add(&keys, &key_hashes).await.unwrap(); let test_hashes = UInt64Array::from_iter_values([1, 2, 1]); let result = key_hash.inverse(&test_hashes).await.unwrap(); @@ -347,12 +439,12 @@ mod tests { #[tokio::test] async fn test_thread_safe_inverse_with_string() { - let keys = Arc::new(StringArray::from(vec!["awkward", "tacos"])); + let keys = StringArray::from(vec!["awkward", "tacos"]); let key_hashes = UInt64Array::from(vec![1, 2]); - let key_hash = KeyHashInverse::from_data_type(DataType::Utf8); + let key_hash = KeyHashInverse::from_data_type(&DataType::Utf8); let key_hash = ThreadSafeKeyHashInverse::new(key_hash); - key_hash.add(keys, &key_hashes).await.unwrap(); + key_hash.add(&keys, &key_hashes).await.unwrap(); let test_hashes = UInt64Array::from_iter_values([1, 2, 1]); let result = key_hash.inverse(&test_hashes).await.unwrap(); @@ -388,9 +480,9 @@ mod tests { key_hash.store_to(&compute_store).unwrap(); let mut key_hash = KeyHashInverse::restore_from(&compute_store).unwrap(); - let keys = Arc::new(StringArray::from(vec!["party", "pizza"])); + let keys = StringArray::from(vec!["party", "pizza"]); let key_hashes = UInt64Array::from(vec![3, 4]); - key_hash.add(keys, &key_hashes).unwrap(); + key_hash.add(&keys, &key_hashes).unwrap(); let test_hashes = UInt64Array::from_iter_values([1, 2, 3, 4]); let result = key_hash.inverse(&test_hashes).unwrap(); assert_eq!( @@ -400,10 +492,10 @@ mod tests { } async fn test_key_hash_inverse() -> KeyHashInverse { - let keys = Arc::new(StringArray::from(vec!["awkward", "tacos"])); + let keys = StringArray::from(vec!["awkward", "tacos"]); let key_hashes = UInt64Array::from(vec![1, 2]); - let mut key_hash = KeyHashInverse::from_data_type(DataType::Utf8); - key_hash.add(keys, &key_hashes).unwrap(); + let mut key_hash = KeyHashInverse::from_data_type(&DataType::Utf8); + key_hash.add(&keys, &key_hashes).unwrap(); key_hash } diff --git a/crates/sparrow-runtime/src/lib.rs b/crates/sparrow-runtime/src/lib.rs index 0841848b6..e9e9e6621 100644 --- a/crates/sparrow-runtime/src/lib.rs +++ b/crates/sparrow-runtime/src/lib.rs @@ -25,7 +25,7 @@ mod batch; pub mod execute; mod key_hash_index; -pub mod merge; +pub mod key_hash_inverse; mod metadata; mod min_heap; pub mod prepare; @@ -38,6 +38,7 @@ use std::path::PathBuf; pub use batch::*; pub use metadata::*; +pub use prepare::preparer; use read::*; use sparrow_api::kaskada::v1alpha::execute_request::Limits; @@ -47,6 
+48,7 @@ static DETERMINISTIC_RUNTIME_HASHER: ahash::RandomState = #[derive(Debug, Default, Clone)] pub(crate) struct RuntimeOptions { pub limits: Limits, + pub max_batch_size: Option, /// Path to store the Query Flight Record to. /// Defaults to not storing anything. diff --git a/crates/sparrow-runtime/src/merge.rs b/crates/sparrow-runtime/src/merge.rs deleted file mode 100644 index dc73d38e1..000000000 --- a/crates/sparrow-runtime/src/merge.rs +++ /dev/null @@ -1,13 +0,0 @@ -pub(crate) use gatherer::*; -pub(crate) use homogeneous_merge::*; - -mod binary_merge; -mod gatherer; -mod homogeneous_merge; -mod input; - -#[cfg(test)] -mod testing; - -// Public for benchmarks. -pub use binary_merge::{binary_merge, BinaryMergeInput}; diff --git a/crates/sparrow-runtime/src/min_heap.rs b/crates/sparrow-runtime/src/min_heap.rs index 60363f526..940799eec 100644 --- a/crates/sparrow-runtime/src/min_heap.rs +++ b/crates/sparrow-runtime/src/min_heap.rs @@ -46,10 +46,6 @@ impl Ord for PriorityElement { pub(crate) struct MinHeap(BinaryHeap>); impl MinHeap { - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - pub fn len(&self) -> usize { self.0.len() } diff --git a/crates/sparrow-runtime/src/prepare.rs b/crates/sparrow-runtime/src/prepare.rs index 9e05ffe46..45fcc87f4 100644 --- a/crates/sparrow-runtime/src/prepare.rs +++ b/crates/sparrow-runtime/src/prepare.rs @@ -16,6 +16,7 @@ mod error; pub(crate) mod execute_input_stream; mod prepare_input_stream; mod prepare_metadata; +pub mod preparer; mod slice_preparer; pub use error::*; diff --git a/crates/sparrow-runtime/src/prepare/column_behavior.rs b/crates/sparrow-runtime/src/prepare/column_behavior.rs index 17531a312..6c5018d05 100644 --- a/crates/sparrow-runtime/src/prepare/column_behavior.rs +++ b/crates/sparrow-runtime/src/prepare/column_behavior.rs @@ -282,9 +282,8 @@ impl ColumnBehavior { } ); - let entity_column = sparrow_arrow::hash::hash(column) - .into_report() - .change_context(Error::PreparingColumn)?; + let entity_column = + sparrow_arrow::hash::hash(column).change_context(Error::PreparingColumn)?; Arc::new(entity_column) } diff --git a/crates/sparrow-runtime/src/prepare/execute_input_stream.rs b/crates/sparrow-runtime/src/prepare/execute_input_stream.rs index 524ae9abd..ff4ca880c 100644 --- a/crates/sparrow-runtime/src/prepare/execute_input_stream.rs +++ b/crates/sparrow-runtime/src/prepare/execute_input_stream.rs @@ -14,7 +14,7 @@ use sparrow_api::kaskada::v1alpha::{slice_plan, TableConfig}; use sparrow_arrow::downcast::downcast_primitive_array; use sparrow_core::TableSchema; -use crate::execute::key_hash_inverse::ThreadSafeKeyHashInverse; +use crate::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::prepare::slice_preparer::SlicePreparer; use crate::prepare::Error; @@ -309,9 +309,8 @@ async fn update_key_inverse( .into_report() .change_context(Error::PreparingColumn)?; key_hash_inverse - .add(keys.clone(), key_hashes) + .add(keys.as_ref(), key_hashes) .await - .into_report() .change_context(Error::PreparingColumn)?; Ok(()) } @@ -329,7 +328,7 @@ mod tests { use static_init::dynamic; use uuid::Uuid; - use crate::execute::key_hash_inverse::{KeyHashInverse, ThreadSafeKeyHashInverse}; + use crate::key_hash_inverse::ThreadSafeKeyHashInverse; use crate::prepare::execute_input_stream; use crate::RawMetadata; @@ -379,9 +378,8 @@ mod tests { let batch2 = make_time_batch(&[6, 12, 10, 17, 11, 12]); let batch3 = make_time_batch(&[20]); let reader = futures::stream::iter(vec![Ok(batch1), Ok(batch2), Ok(batch3)]).boxed(); - let key_hash_inverse 
= Arc::new(ThreadSafeKeyHashInverse::new( - KeyHashInverse::from_data_type(DataType::UInt64), - )); + let key_hash_inverse = + Arc::new(ThreadSafeKeyHashInverse::from_data_type(&DataType::UInt64)); let raw_metadata = RawMetadata::from_raw_schema(RAW_SCHEMA.clone()).unwrap(); let mut stream = execute_input_stream::prepare_input( @@ -429,9 +427,8 @@ mod tests { let batch3 = make_time_batch(&[20]); let reader = futures::stream::iter(vec![Ok(batch1), Ok(batch2), Ok(batch3)]).boxed(); - let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::new( - KeyHashInverse::from_data_type(DataType::UInt64), - )); + let key_hash_inverse = + Arc::new(ThreadSafeKeyHashInverse::from_data_type(&DataType::UInt64)); let raw_metadata = RawMetadata::from_raw_schema(RAW_SCHEMA.clone()).unwrap(); let mut stream = execute_input_stream::prepare_input( @@ -480,9 +477,8 @@ mod tests { let batch3 = make_time_batch(&[7, 17]); let reader = futures::stream::iter(vec![Ok(batch1), Ok(batch2), Ok(batch3)]).boxed(); - let key_hash_inverse = Arc::new(ThreadSafeKeyHashInverse::new( - KeyHashInverse::from_data_type(DataType::UInt64), - )); + let key_hash_inverse = + Arc::new(ThreadSafeKeyHashInverse::from_data_type(&DataType::UInt64)); let raw_metadata = RawMetadata::from_raw_schema(RAW_SCHEMA.clone()).unwrap(); let mut stream = execute_input_stream::prepare_input( diff --git a/crates/sparrow-runtime/src/prepare/preparer.rs b/crates/sparrow-runtime/src/prepare/preparer.rs new file mode 100644 index 000000000..40859a882 --- /dev/null +++ b/crates/sparrow-runtime/src/prepare/preparer.rs @@ -0,0 +1,225 @@ +use std::sync::Arc; + +use arrow::array::{ArrayRef, UInt64Array}; +use arrow::compute::SortColumn; +use arrow::datatypes::{ArrowPrimitiveType, DataType, SchemaRef, TimestampNanosecondType}; +use arrow::record_batch::RecordBatch; +use arrow_array::Array; +use error_stack::{IntoReport, ResultExt}; + +#[derive(derive_more::Display, Debug)] +pub enum Error { + #[display(fmt = "batch missing required column '{_0}'")] + BatchMissingRequiredColumn(String), + #[display(fmt = "failed to convert time column from type {_0:?} to timestamp_ns")] + ConvertTime(DataType), + #[display(fmt = "failed to convert subsort column from type {_0:?} to uint64")] + ConvertSubsort(DataType), + #[display(fmt = "failed to hash key array")] + HashingKeyArray, + #[display(fmt = "failed to create batch")] + CreatingBatch, + #[display(fmt = "failed to sort batch")] + SortingBatch, + #[display(fmt = "unrecognized time unit")] + UnrecognizedTimeUnit(String), +} + +impl error_stack::Context for Error {} + +pub struct Preparer { + prepared_schema: SchemaRef, + time_column_name: String, + subsort_column_name: Option, + next_subsort: u64, + key_column_name: String, + time_multiplier: Option, +} + +impl Preparer { + /// Create a new prepare produce data with the given schema. + pub fn new( + time_column_name: String, + subsort_column_name: Option, + key_column_name: String, + prepared_schema: SchemaRef, + prepare_hash: u64, + time_unit: Option<&str>, + ) -> error_stack::Result { + let time_multiplier = time_multiplier(time_unit)?; + Ok(Self { + prepared_schema, + time_column_name, + subsort_column_name, + next_subsort: prepare_hash, + key_column_name, + time_multiplier, + }) + } + + pub fn schema(&self) -> SchemaRef { + self.prepared_schema.clone() + } + + /// Prepare a batch of data. + /// + /// - This computes and adds the key columns. + /// - This sorts the batch by time, subsort and key hash. + /// - This adds or casts columns as needed. 
+ /// + /// Self is mutated as necessary to ensure the `subsort` column is increasing, if + /// it is added. + pub fn prepare_batch(&mut self, batch: RecordBatch) -> error_stack::Result<RecordBatch, Error> { + let time = get_required_column(&batch, &self.time_column_name)?; + let time = cast_to_timestamp(time, self.time_multiplier)?; + + let num_rows = batch.num_rows(); + let subsort = if let Some(subsort_column_name) = self.subsort_column_name.as_ref() { + let subsort = get_required_column(&batch, subsort_column_name)?; + arrow::compute::cast(subsort.as_ref(), &DataType::UInt64) + .into_report() + .change_context_lazy(|| Error::ConvertSubsort(subsort.data_type().clone()))? + } else { + let subsort: UInt64Array = (self.next_subsort..).take(num_rows).collect(); + self.next_subsort += num_rows as u64; + Arc::new(subsort) + }; + + let key = get_required_column(&batch, &self.key_column_name)?; + let key_hash = + sparrow_arrow::hash::hash(key.as_ref()).change_context(Error::HashingKeyArray)?; + let key_hash: ArrayRef = Arc::new(key_hash); + + let mut columns = Vec::with_capacity(self.prepared_schema.fields().len()); + + let indices = arrow::compute::lexsort_to_indices( + &[ + SortColumn { + values: time.clone(), + options: None, + }, + SortColumn { + values: subsort.clone(), + options: None, + }, + SortColumn { + values: key_hash.clone(), + options: None, + }, + ], + None, + ) + .into_report() + .change_context(Error::SortingBatch)?; + + let sort = |array: &ArrayRef| { + arrow::compute::take(array.as_ref(), &indices, None) + .into_report() + .change_context(Error::SortingBatch) + }; + columns.push(sort(&time)?); + columns.push(sort(&subsort)?); + columns.push(sort(&key_hash)?); + + // TODO: Slicing? + for field in self.prepared_schema.fields().iter().skip(3) { + let column = if let Some(column) = batch.column_by_name(field.name()) { + sort(column)?
+ } else { + arrow::array::new_null_array(field.data_type(), num_rows) + }; + columns.push(column) + } + let prepared = RecordBatch::try_new(self.prepared_schema.clone(), columns) + .into_report() + .change_context(Error::CreatingBatch)?; + Ok(prepared) + } +} + +fn get_required_column<'a>( + batch: &'a RecordBatch, + name: &str, +) -> error_stack::Result<&'a ArrayRef, Error> { + batch + .column_by_name(name) + .ok_or_else(|| error_stack::report!(Error::BatchMissingRequiredColumn(name.to_owned()))) +} + +fn time_multiplier(time_unit: Option<&str>) -> error_stack::Result, Error> { + match time_unit.unwrap_or("ns") { + "ns" => Ok(None), + "us" => Ok(Some(1_000)), + "ms" => Ok(Some(1_000_000)), + "s" => Ok(Some(1_000_000_000)), + unrecognized => error_stack::bail!(Error::UnrecognizedTimeUnit(unrecognized.to_owned())), + } +} + +fn cast_to_timestamp( + time: &ArrayRef, + time_multiplier: Option, +) -> error_stack::Result { + match time.data_type() { + DataType::UInt8 + | DataType::UInt16 + | DataType::UInt32 + | DataType::UInt64 + | DataType::Int8 + | DataType::Int16 + | DataType::Int32 + | DataType::Int64 => { + numeric_to_timestamp::(time.as_ref(), time_multiplier) + } + DataType::Float16 | DataType::Float32 | DataType::Float64 => { + numeric_to_timestamp::( + time.as_ref(), + time_multiplier.map(|m| m as f64), + ) + } + DataType::Utf8 | DataType::Date32 | DataType::Date64 | DataType::Timestamp(_, _) => { + arrow::compute::cast(time.as_ref(), &TimestampNanosecondType::DATA_TYPE) + .into_report() + .change_context_lazy(|| Error::ConvertTime(time.data_type().clone())) + } + other => { + error_stack::bail!(Error::ConvertTime(other.clone())) + } + } +} + +fn numeric_to_timestamp( + raw: &dyn Array, + time_multiplier: Option, +) -> error_stack::Result { + let error = || Error::ConvertTime(raw.data_type().clone()); + + // First, cast to `T::DATA_TYPE`. + let time = arrow::compute::cast(raw, &T::DATA_TYPE) + .into_report() + .change_context_lazy(error)?; + + // Perform the multiplication on the `T::DATA_TYPE`. + // Do this before conversion to int64 so we don't lose f64 precision. + let time = if let Some(time_multiplier) = time_multiplier { + arrow::compute::multiply_scalar_dyn::(time.as_ref(), time_multiplier) + .into_report() + .change_context_lazy(error)? + } else { + time + }; + + // Convert to int64 (if necessary). + let time = if T::DATA_TYPE == DataType::Int64 { + time + } else { + arrow::compute::cast(time.as_ref(), &DataType::Int64) + .into_report() + .change_context_lazy(error)? + }; + + // Convert from int64 to nanosecond. This expects the units to already be converted, which they are. 
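// Editor's note (worked example, not part of this diff): for time_unit = "ms"
// the multiplier is 1_000_000, so a raw value of 1_694_000_123 becomes
// 1_694_000_123 * 1_000_000 = 1_694_000_123_000_000 ns. The final cast below
// then reinterprets that int64 as Timestamp(Nanosecond, None) without further
// scaling.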
+ arrow::compute::cast(time.as_ref(), &TimestampNanosecondType::DATA_TYPE) + .into_report() + .change_context_lazy(error) +} diff --git a/crates/sparrow-runtime/src/prepare/slice_preparer.rs b/crates/sparrow-runtime/src/prepare/slice_preparer.rs index 3b1be6b2f..572772163 100644 --- a/crates/sparrow-runtime/src/prepare/slice_preparer.rs +++ b/crates/sparrow-runtime/src/prepare/slice_preparer.rs @@ -4,7 +4,7 @@ use arrow::array::{Array, ArrayRef, StringArray, UInt64Array}; use arrow::compute::eq_scalar; use arrow::datatypes::DataType; use arrow::record_batch::RecordBatch; -use error_stack::{IntoReport, IntoReportCompat, ResultExt}; +use error_stack::{IntoReport, ResultExt}; use hashbrown::HashSet; use sparrow_api::kaskada::v1alpha::slice_plan; use sparrow_arrow::downcast::downcast_primitive_array; @@ -47,7 +47,8 @@ impl SlicePreparer { entity_keys.null_count() ) ); - let entity_key_hashes = sparrow_arrow::hash::hash(&entity_keys)?; + let entity_key_hashes = + sparrow_arrow::hash::hash(&entity_keys).map_err(|e| e.into_error())?; let entity_key_hashes: &UInt64Array = downcast_primitive_array(&entity_key_hashes)?; let desired_keys: HashSet = entity_key_hashes.values().iter().copied().collect(); @@ -125,9 +126,7 @@ impl SlicePreparer { record_batch: &RecordBatch, ) -> error_stack::Result { let entity_column = record_batch.column(self.entity_column_index); - sparrow_arrow::hash::hash(entity_column) - .into_report() - .change_context(Error::SlicingBatch) + sparrow_arrow::hash::hash(entity_column).change_context(Error::SlicingBatch) } } @@ -138,12 +137,10 @@ mod tests { use arrow::array::{Int32Array, StringArray}; use arrow::datatypes::{DataType, Field, Schema}; use arrow::record_batch::RecordBatch; + use sparrow_arrow::hash::hash; use super::*; - const HASH_A: u64 = 7636293598395510443; - const HASH_B: u64 = 2637710838665036908; - #[test] fn test_preparer_slice_batch_100_percent() { let entity_column_index = 0; @@ -181,7 +178,7 @@ mod tests { prepare_filter: PrepareFilter::PercentFilter { percent }, }; - let entity_key_column = StringArray::from(vec!["a", "a", "e", "e"]); + let entity_key_column = StringArray::from(vec!["a", "a", "c", "c"]); let data_column = Int32Array::from(vec![1, 2, 3, 4]); let schema = Schema::new(vec![ Field::new("id", DataType::Utf8, false), @@ -222,16 +219,21 @@ mod tests { .unwrap(); let sliced_batch = preparer.slice_batch(batch).unwrap(); - let expected_batch_size = 3; + let expected_batch_size = 2; assert_eq!(sliced_batch.num_rows(), expected_batch_size); } #[test] fn test_preparer_slice_specific_entity_key() { let entity_column_index = 0; + + let entity_key_column = + StringArray::from(vec!["a", "b", "a", "b", "e", "f", "g", "h", "i"]); + let hashes = hash(&entity_key_column).unwrap(); + let mut hash_set = HashSet::new(); - hash_set.insert(HASH_A); - hash_set.insert(HASH_B); + hash_set.insert(hashes.value(0)); + hash_set.insert(hashes.value(1)); let preparer = SlicePreparer { entity_column_index, @@ -240,8 +242,6 @@ mod tests { }, }; - let entity_key_column = - StringArray::from(vec!["a", "b", "a", "b", "e", "f", "g", "h", "i"]); let data_column = Int32Array::from(vec![1, 2, 3, 4, 5, 6, 7, 8, 9]); let schema = Schema::new(vec![ Field::new("id", DataType::Utf8, false), diff --git a/crates/sparrow-runtime/src/read/table_reader.rs b/crates/sparrow-runtime/src/read/table_reader.rs index 37cb07961..a837002dd 100644 --- a/crates/sparrow-runtime/src/read/table_reader.rs +++ b/crates/sparrow-runtime/src/read/table_reader.rs @@ -17,12 +17,12 @@ use sparrow_qfr::{ use 
tokio_stream::StreamExt; use tracing::info; -use crate::merge::{homogeneous_merge, GatheredBatches, Gatherer}; use crate::min_heap::{HasPriority, MinHeap}; use crate::read::error::Error; use crate::read::parquet_stream::{self, new_parquet_stream}; use crate::stores::ObjectStoreRegistry; use crate::Batch; +use sparrow_merge::old::{homogeneous_merge, GatheredBatches, Gatherer}; const READ_TABLE: Activity = activity!("scan.read_file"); const GATHER_TABLE_BATCHES: Activity = activity!("scan.gather"); @@ -661,7 +661,7 @@ mod tests { let mut data_context = DataContext::default(); let schema = sparrow_api::kaskada::v1alpha::Schema::try_from(TABLE_SCHEMA.as_ref()).unwrap(); - let table_id = data_context + let table_info = data_context .add_table(ComputeTable { config: Some(CONFIG.clone()), metadata: Some(TableMetadata { @@ -674,7 +674,6 @@ mod tests { }], }) .unwrap(); - let table_info = data_context.table_info(table_id).unwrap(); let prepared_files = select_prepared_files(table_info, &None, Some(max_event_in_snapshot)).unwrap(); @@ -719,7 +718,7 @@ mod tests { let mut data_context = DataContext::default(); let schema = sparrow_api::kaskada::v1alpha::Schema::try_from(TABLE_SCHEMA.as_ref()).unwrap(); - let table_id = data_context + let table_info = data_context .add_table(ComputeTable { config: Some(CONFIG.clone()), metadata: Some(TableMetadata { @@ -732,7 +731,6 @@ mod tests { }], }) .unwrap(); - let table_info = data_context.table_info(table_id).unwrap(); let upper_bound_opt = if let Some(ts) = max_event_time { NaiveDateTime::from_timestamp_opt(ts.seconds, ts.nanos as u32) diff --git a/crates/sparrow-scheduler/Cargo.toml b/crates/sparrow-scheduler/Cargo.toml new file mode 100644 index 000000000..1f6db8e9c --- /dev/null +++ b/crates/sparrow-scheduler/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "sparrow-scheduler" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +publish = false +description = """ +Scheduler for managing local, multi-threaded execution. +""" + +[dependencies] +core_affinity.workspace = true +derive_more.workspace = true +error-stack.workspace = true +index_vec.workspace = true +itertools.workspace = true +serde.workspace = true +sparrow-arrow = { path = "../sparrow-arrow" } +tracing.workspace = true +work-queue = "0.1.4" + +[dev-dependencies] + +[target.'cfg(loom)'.dependencies] +loom = "0.6.0" + +[lib] +doctest = false diff --git a/crates/sparrow-scheduler/src/error.rs b/crates/sparrow-scheduler/src/error.rs new file mode 100644 index 000000000..210610d30 --- /dev/null +++ b/crates/sparrow-scheduler/src/error.rs @@ -0,0 +1,27 @@ +use crate::Partition; + +/// Top level errors reported during partitioned pipeline execution. 
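// Editor's note: a minimal sketch of how such error-stack contexts are
// typically defined and raised (assumed usage mirroring this crate's
// conventions; the names here are hypothetical):
//
//     #[derive(derive_more::Display, Debug)]
//     #[display(fmt = "worker panicked")]
//     struct WorkerPanicked;
//     impl error_stack::Context for WorkerPanicked {}
//
//     fn join(handle: std::thread::JoinHandle<()>) -> error_stack::Result<(), WorkerPanicked> {
//         handle.join().map_err(|_| error_stack::report!(WorkerPanicked))
//     }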
+#[derive(derive_more::Display, Debug)] +pub enum Error { + #[display(fmt = "worker panicked")] + WorkerPanicked, + #[display(fmt = "spawning worker")] + SpawnWorker, + #[display( + fmt = "error executing {method} on partition {partition} of pipeline '{name}' ({index})" + )] + Pipeline { + method: &'static str, + index: usize, + name: &'static str, + partition: Partition, + }, + #[display(fmt = "dropped partition {partition} of pipeline '{name}' ({index})")] + PipelineDropped { + index: usize, + name: &'static str, + partition: Partition, + }, +} + +impl error_stack::Context for Error {} diff --git a/crates/sparrow-scheduler/src/lib.rs b/crates/sparrow-scheduler/src/lib.rs new file mode 100644 index 000000000..0a14aa506 --- /dev/null +++ b/crates/sparrow-scheduler/src/lib.rs @@ -0,0 +1,29 @@ +#![warn( + rust_2018_idioms, + nonstandard_style, + future_incompatible, + clippy::mod_module_files, + clippy::print_stdout, + clippy::print_stderr, + clippy::undocumented_unsafe_blocks +)] + +//! Scheduler for local, multi-threaded execution of Sparrow plans. + +mod error; +mod partition; +mod pipeline; +mod queue; +mod schedule_count; +mod sink; +mod task; +mod worker; +mod worker_pool; + +pub use error::*; +pub use partition::*; +pub use pipeline::*; +pub use sink::*; +pub use task::*; +pub use worker::*; +pub use worker_pool::*; diff --git a/crates/sparrow-scheduler/src/partition.rs b/crates/sparrow-scheduler/src/partition.rs new file mode 100644 index 000000000..cd8624de4 --- /dev/null +++ b/crates/sparrow-scheduler/src/partition.rs @@ -0,0 +1,8 @@ +index_vec::define_index_type! { + /// Wrapper around a partition. + pub struct Partition = u32; + + DISPLAY_FORMAT = "{}"; +} + +pub type Partitioned = index_vec::IndexVec; diff --git a/crates/sparrow-scheduler/src/pipeline.rs b/crates/sparrow-scheduler/src/pipeline.rs new file mode 100644 index 000000000..9630828b6 --- /dev/null +++ b/crates/sparrow-scheduler/src/pipeline.rs @@ -0,0 +1,128 @@ +use std::borrow::Cow; + +use sparrow_arrow::Batch; + +use crate::{Partition, Partitioned, Scheduler, TaskRef}; + +#[derive(derive_more::Display, Debug)] +pub enum PipelineError { + #[display(fmt = "invalid input index {input} for pipeline with {input_len} inputs")] + InvalidInput { input: usize, input_len: usize }, + #[display(fmt = "input {input} for partition {input_partition} is already closed")] + InputClosed { + input: usize, + input_partition: Partition, + }, + #[display(fmt = "illegal state: {_0}")] + IllegalState(Cow<'static, str>), + #[display(fmt = "error executing pipeline")] + Execution, +} + +impl PipelineError { + pub fn illegal_state(state: impl Into>) -> Self { + Self::IllegalState(state.into()) + } +} + +impl error_stack::Context for PipelineError {} + +/// A push-based interface used by the scheduler to drive query execution +/// +/// A pipeline processes data from one or more input partitions, producing output +/// to one or more output partitions. As a [`Pipeline`] may draw on input from +/// more than one upstream [`Pipeline`], input partitions are identified by both +/// a child index, and a partition index, whereas output partitions are only +/// identified by a partition index. +/// +/// This is not intended as an eventual replacement for the physical plan +/// representation, but rather a generic interface that parts of the physical +/// plan are converted to for execution. 
+/// +/// # Eager vs Lazy Execution +/// +/// Whether computation is eagerly done on push, or lazily done on pull, is +/// intentionally left as an implementation detail of the [`Pipeline`] +/// +/// This allows flexibility to support the following different patterns, and potentially more: +/// +/// An eager, push-based pipeline, that processes a batch synchronously in [`Pipeline::push`] +/// and immediately wakes the corresponding output partition. +/// +/// A parallel, push-based pipeline, that enqueues the processing of a batch to +/// the thread pool in [`Pipeline::push`], and wakes the corresponding output +/// partition when the job completes. Order and non-order preserving variants +/// are possible +/// +/// A merge pipeline which combines data from one or more input partitions into one or +/// more output partitions. [`Pipeline::push`] adds data to an input buffer, and wakes +/// any output partitions that may now be able to make progress. This may be none if +/// the operator is waiting on data from a different input partition. +/// +/// An aggregation pipeline which combines data from one or more input partitions into +/// a single output partition. [`Pipeline::push`] would eagerly update the computed +/// aggregates, and the final [`Pipeline::close`] trigger flushing these to the output. +/// It would also be possible to flush once the partial aggregates reach a certain size. +/// +/// A partition-aware aggregation pipeline, which functions similarly to the above, but +/// computes aggregations per input partition, before combining these prior to flush. +/// +/// An async input pipeline, which has no inputs, and wakes the output partition +/// whenever new data is available. +/// +/// A JIT compiled sequence of synchronous operators, that perform multiple operations +/// from the physical plan as a single [`Pipeline`]. Parallelized implementations +/// are also possible. +/// +pub trait Pipeline: Send + Sync + std::fmt::Debug { + /// Initialize the pipeline by providing the tasks it will use for scheduling. + /// + /// The number of tasks provided indicate the number of partitions this pipeline + /// should execute with. + /// + /// This is unfortunately separate to allow cyclic initialization. Specifically, + /// `Arc::new_cyclic` expects a function that doesn't take errors, while creating + /// many pipelines *does* produce errors. To address this, we first create the + /// part of the pipeline that doesn't need to reference it's own tasks, and then + /// we initialize it as part of `Arc::new_cyclic`. + fn initialize(&mut self, tasks: Partitioned); + + /// Add a [`Batch`] to the given input partition and input index. + /// + /// This is called from outside the pipeline -- either a Tokio thread + /// reading from a source or a producing pipeline. As a result, this should + /// generally add the batch to a mutex-protected buffer and ensure a task is + /// scheduled for executing this partition of this pipeline.. + /// + /// Schedules any tasks that need to be executed on the `scheduler`. + fn add_input( + &self, + input_partition: Partition, + input: usize, + batch: Batch, + scheduler: &mut dyn Scheduler, + ) -> error_stack::Result<(), PipelineError>; + + /// Mark an input partition and input index as complete. + /// + /// Schedules any tasks that need to be executed on the `scheduler`. 
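// Editor's note: a hedged sketch of a trivial pass-through implementation of
// this trait (the fields `buffers`, `tasks`, and `sink` are hypothetical;
// illustrative only):
//
//     fn add_input(&self, input_partition: Partition, _input: usize, batch: Batch,
//                  scheduler: &mut dyn Scheduler) -> error_stack::Result<(), PipelineError> {
//         // Buffer the batch and wake this partition's task.
//         self.buffers[input_partition].lock().push_back(batch);
//         scheduler.schedule(self.tasks[input_partition].clone());
//         Ok(())
//     }
//
//     fn do_work(&self, partition: Partition, scheduler: &mut dyn Scheduler)
//         -> error_stack::Result<(), PipelineError> {
//         // Process a single batch, then return; the task is re-scheduled if
//         // more input arrived while we were running.
//         if let Some(batch) = self.buffers[partition].lock().pop_front() {
//             self.sink.add_input(partition, batch, scheduler)?;
//         }
//         Ok(())
//     }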
+ fn close_input( + &self, + input_partition: Partition, + input: usize, + scheduler: &mut dyn Scheduler, + ) -> error_stack::Result<(), PipelineError>; + + /// Run the pipeline on the data that has been pushed in. + /// + /// May schedule additional work to be done on the `scheduler`. + /// + /// Generally this should return after processing / producing a single + /// batch. If additional work must be done, this partition may be + /// re-scheduled with the `scheduler`. + fn do_work( + &self, + partition: Partition, + scheduler: &mut dyn Scheduler, + ) -> error_stack::Result<(), PipelineError>; +} diff --git a/crates/sparrow-scheduler/src/queue.rs b/crates/sparrow-scheduler/src/queue.rs new file mode 100644 index 000000000..d1ce3e2c6 --- /dev/null +++ b/crates/sparrow-scheduler/src/queue.rs @@ -0,0 +1,112 @@ +//! Provide the global and per-thread (local) work queues. +//! +//! These wrap the currently used crate to make it easy to swap in +//! different implementations. + +/// A cloneable, global queue for adding elements to any worker. +#[derive(Debug)] +#[repr(transparent)] +pub(crate) struct GlobalQueue { + queue: work_queue::Queue, +} + +/// The local queue for a specific worker. +/// +/// Generally, tasks are added (and processed) in FIFO order, but each local +/// queue has a single LIFO slot, allowing recently produced tasks to be +/// immediately executed. +/// +/// Also allows adding tasks to the global queue. +#[derive(Debug)] +#[repr(transparent)] +pub(crate) struct LocalQueue { + queue: work_queue::LocalQueue, +} + +// Manually implement Clone since we don't need `T: Clone`. +impl Clone for GlobalQueue { + fn clone(&self) -> Self { + Self { + queue: self.queue.clone(), + } + } +} + +impl GlobalQueue { + pub(crate) fn new(local_queues: usize, local_queue_size: u16) -> Self { + Self { + queue: work_queue::Queue::new(local_queues, local_queue_size), + } + } + + /// Take the local queues associated with this. + /// + /// May only be called once. + /// + /// Panics if the local queues have already been taken. + pub(crate) fn take_local_queues(&self) -> impl Iterator> + '_ { + self.queue.local_queues().map(|queue| LocalQueue { queue }) + } + + pub(crate) fn push(&self, item: T) { + self.queue.push(item) + } +} + +impl LocalQueue { + /// Pop an item from the local queue, or steal from the global and sibling queues if it is empty. + pub fn pop(&mut self) -> Option { + self.queue.pop() + } + + pub(crate) fn push(&mut self, item: T) { + self.queue.push(item) + } + + pub(crate) fn push_yield(&mut self, item: T) { + self.queue.push_yield(item) + } + + pub(crate) fn push_global(&self, item: T) { + self.queue.global().push(item) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_focus_on_recent_item() { + let global = GlobalQueue::new(1, 4); + let mut local = global.take_local_queues().next().unwrap(); + + local.push(1); + local.push(2); + local.push(3); + + // The fact this pops 3 is important. It is what makes sure the task + // most recently produced on this CPU (with the data already in the cache) + // is what is executed next. + assert_eq!(local.pop(), Some(3)); + + // The order of these tasks is somewhat unimportant. Currently, anything + // other than the "most recently" produced task is LIFO. It may be + // beneficial to be FIFO in case we have two tasks using "local" data. 
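// Editor's note (illustrative trace, assuming a single LIFO slot plus a FIFO
// ring as in the `work_queue` crate): push(1) -> slot=[1]; push(2) -> slot=[2],
// ring=[1]; push(3) -> slot=[3], ring=[1, 2]. pop() therefore yields 3 from the
// slot, then drains the ring in FIFO order: 1, then 2 -- matching the asserts.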
+ assert_eq!(local.pop(), Some(1)); + assert_eq!(local.pop(), Some(2)); + } + + #[test] + fn test_take_global_items() { + let global = GlobalQueue::new(1, 4); + let mut local = global.take_local_queues().next().unwrap(); + + global.push(1); + global.push(2); + + // The local queue steals work from the global queue, which is LIFO. + assert_eq!(local.pop(), Some(1)); + assert_eq!(local.pop(), Some(2)); + } +} diff --git a/crates/sparrow-scheduler/src/schedule_count.rs b/crates/sparrow-scheduler/src/schedule_count.rs new file mode 100644 index 000000000..d96ff0e79 --- /dev/null +++ b/crates/sparrow-scheduler/src/schedule_count.rs @@ -0,0 +1,79 @@ +#[cfg(loom)] +pub(crate) use loom::sync::atomic::{AtomicUsize, Ordering}; + +#[cfg(not(loom))] +pub(crate) use std::sync::atomic::{AtomicUsize, Ordering}; + +#[repr(transparent)] +#[derive(Debug, Default)] +pub(crate) struct ScheduleCount(AtomicUsize); + +impl ScheduleCount { + /// Record a request for scheduling. + /// + /// Returns true if this task wasn't previously scheduled. + pub fn schedule(&self) -> bool { + self.0.fetch_add(1, Ordering::SeqCst) == 0 + } + + /// Returns a `TaskGuard` which will return the count + pub fn guard(&self) -> ScheduleGuard<'_> { + let entry_count = self.0.load(Ordering::SeqCst); + debug_assert!(entry_count > 0, "Running task with entry count 0"); + ScheduleGuard { + count: self, + entry_count, + } + } +} + +#[must_use] +pub(crate) struct ScheduleGuard<'a> { + count: &'a ScheduleCount, + entry_count: usize, +} + +impl<'a> ScheduleGuard<'a> { + /// Finish executing the task. + /// + /// This will reset the entry count. If the count has been increased during execution + /// this will return `true` to indicate the task should be re-scheduled. + pub fn finish(self) -> bool { + let schedule_count = self.count.0.fetch_sub(self.entry_count, Ordering::SeqCst); + schedule_count != self.entry_count + } +} + +#[cfg(test)] +mod tests { + + // Test using `loom` to verify atomic scheduling. + // + // To run: + // `RUSTFLAGS="--cfg loom" cargo test -p sparrow-scheduler schedule_count::*` + #[cfg(loom)] + #[test] + fn test_loom_scheduling() { + use super::*; + + loom::model(|| { + let count = loom::sync::Arc::new(ScheduleCount::default()); + assert!(count.schedule()); + + let handle = { + let count = count.clone(); + loom::thread::spawn(move || { + let guard = count.guard(); + assert!(!count.schedule()); + assert!(guard.finish()); + + let guard = count.guard(); + assert!(!guard.finish()); + }) + }; + + assert_eq!((), handle.join().unwrap()); + assert_eq!(0, count.0.load(Ordering::SeqCst)); + }) + } +} diff --git a/crates/sparrow-scheduler/src/sink.rs b/crates/sparrow-scheduler/src/sink.rs new file mode 100644 index 000000000..6f7fe7c15 --- /dev/null +++ b/crates/sparrow-scheduler/src/sink.rs @@ -0,0 +1,37 @@ +use std::sync::Arc; + +use sparrow_arrow::Batch; + +use crate::{Partition, Pipeline, PipelineError, Scheduler}; + +/// A struct used for sending batches to a specific input port of a down-stream pipeline. 
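// Editor's note (illustrative trace of the ScheduleCount protocol defined
// above in schedule_count.rs): schedule() moves the count 0 -> 1 and returns
// true, so the task is enqueued; a second schedule() moves it 1 -> 2 and
// returns false (no duplicate enqueue). The worker then takes guard() with
// entry_count = 2 and runs do_work; if schedule() fires meanwhile (2 -> 3),
// finish() subtracts 2, observes the pre-value 3 != 2, and returns true so the
// worker re-enqueues the task exactly once.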
+#[derive(Debug)] +pub struct PipelineInput { + pipeline: Arc, + input: usize, +} + +impl PipelineInput { + pub fn new(pipeline: Arc, input: usize) -> Self { + Self { pipeline, input } + } + + pub fn add_input( + &self, + partition: Partition, + batch: Batch, + scheduler: &mut dyn Scheduler, + ) -> error_stack::Result<(), PipelineError> { + self.pipeline + .add_input(partition, self.input, batch, scheduler) + } + + pub fn close_input( + &self, + input_partition: Partition, + scheduler: &mut dyn Scheduler, + ) -> error_stack::Result<(), PipelineError> { + self.pipeline + .close_input(input_partition, self.input, scheduler) + } +} diff --git a/crates/sparrow-scheduler/src/task.rs b/crates/sparrow-scheduler/src/task.rs new file mode 100644 index 000000000..23fbac6a4 --- /dev/null +++ b/crates/sparrow-scheduler/src/task.rs @@ -0,0 +1,90 @@ +use std::sync::Arc; + +use error_stack::ResultExt; + +use crate::schedule_count::ScheduleCount; +use crate::{Error, Partition, Pipeline, Scheduler}; + +/// The unit of work executed by the scheduler. +/// +/// A task processes a single unit of input (typically a batch), applies a +/// single [Pipeline] and produces a single unit of output (typically a batch). +#[derive(Debug)] +pub struct Task { + /// Index of this pipeline. Used for debugging. + index: usize, + /// Name of the pipeline implementation. + name: &'static str, + /// The pipeline to execute. + /// + /// This is a weak reference to avoid cycles. + pipeline: std::sync::Weak, + /// The partition of the pipeline to execute. + partition: Partition, + /// An atomic counter tracking how many times the task has been submitted. + /// + /// This is reset after the task is executed. + schedule_count: ScheduleCount, +} + +impl Task { + /// Create a new task executing the given pipeline and partition. + pub(crate) fn new( + index: usize, + name: &'static str, + pipeline: std::sync::Weak, + partition: Partition, + ) -> Self { + Self { + index, + name, + pipeline, + partition, + schedule_count: ScheduleCount::default(), + } + } + + /// Mark this task as scheduled. + /// + /// Returns `false` if this task was previously scheduled and has not + /// yet been executed. + /// + /// Generally should only be called by the worker. + /// + /// If this is called while it is being executed (eg., during `do_work`) then + /// the `guard` will return `true` to indicate the task should be re-executed. + pub(crate) fn schedule(&self) -> bool { + self.schedule_count.schedule() + } + + fn pipeline(&self) -> error_stack::Result, Error> { + Ok(self.pipeline.upgrade().ok_or(Error::PipelineDropped { + index: self.index, + name: self.name, + partition: self.partition, + })?) + } + + fn error(&self, method: &'static str) -> Error { + Error::Pipeline { + method, + index: self.index, + name: self.name, + partition: self.partition, + } + } + + #[inline] + pub(crate) fn do_work( + &self, + scheduler: &mut dyn Scheduler, + ) -> error_stack::Result { + let guard = self.schedule_count.guard(); + self.pipeline()? + .do_work(self.partition, scheduler) + .change_context_lazy(|| self.error("do_work"))?; + Ok(guard.finish()) + } +} + +pub type TaskRef = Arc; diff --git a/crates/sparrow-scheduler/src/worker.rs b/crates/sparrow-scheduler/src/worker.rs new file mode 100644 index 000000000..c6064cd24 --- /dev/null +++ b/crates/sparrow-scheduler/src/worker.rs @@ -0,0 +1,105 @@ +use crate::{queue::*, Error, TaskRef}; + +pub trait Scheduler { + /// Schedule a task for immediate, local execution. 
+ /// + /// For local queues, this will schedule it as the next task, potentially + /// displacing the other task(s) scheduled at the front. + /// + /// If the local queue is full, it will move half of its tasks to the global + /// queue. + /// + /// For the global queue, this will add to the end of the list of tasks. + fn schedule(&mut self, task: TaskRef); + + /// Schedule a task for eventual, local execution. + /// + /// For local and global queues, this will add the task to the end of the tasks. + /// + /// For local queues, this can be used to give other tasks a chance to run. + /// Otherwise, there’s a risk that one task will completely take over a + /// thread in a push-pop cycle due to the LIFO slot. + /// + /// If the local queue is full, it will move half of its tasks to the global + /// queue. + fn schedule_yield(&mut self, task: TaskRef); + + /// Schedule a task for eventual execution anywhere. + /// + /// For both the local and global queues this adds to the end of the global + /// queue. + fn schedule_global(&self, task: TaskRef); +} + +/// An injector that allows adding work to the global queue. +#[derive(Debug, Clone)] +pub struct Injector { + queue: GlobalQueue, +} + +impl Injector { + pub fn create(workers: usize, local_queue_size: u16) -> (Self, Vec) { + let queue = GlobalQueue::new(workers, local_queue_size); + let workers = queue + .take_local_queues() + .map(|queue| Worker { queue }) + .collect(); + (Injector { queue }, workers) + } +} + +impl Scheduler for Injector { + fn schedule_global(&self, task: TaskRef) { + if task.schedule() { + self.queue.push(task) + } + } + + fn schedule(&mut self, task: TaskRef) { + self.schedule_global(task) + } + + fn schedule_yield(&mut self, task: TaskRef) { + self.schedule_global(task) + } +} + +/// An individual worker that allows adding work to the local or global queue. +pub struct Worker { + queue: LocalQueue, +} + +impl Worker { + /// Run the work loop to completion. + pub(crate) fn work_loop(mut self) -> error_stack::Result<(), Error> { + while let Some(task) = self.queue.pop() { + if task.do_work(&mut self)? { + // This means that the task was schedule while we were executing. + // As a result, we didn't add it to any queue yet, so we need to + // do so now. + self.queue.push_global(task); + } + } + Ok(()) + } +} + +impl Scheduler for Worker { + fn schedule(&mut self, task: TaskRef) { + if task.schedule() { + self.queue.push(task) + } + } + + fn schedule_yield(&mut self, task: TaskRef) { + if task.schedule() { + self.queue.push_yield(task) + } + } + + fn schedule_global(&self, task: TaskRef) { + if task.schedule() { + self.queue.push_global(task) + } + } +} diff --git a/crates/sparrow-scheduler/src/worker_pool.rs b/crates/sparrow-scheduler/src/worker_pool.rs new file mode 100644 index 000000000..724eba7c4 --- /dev/null +++ b/crates/sparrow-scheduler/src/worker_pool.rs @@ -0,0 +1,142 @@ +use std::sync::Arc; + +use crate::worker::Injector; +use crate::{Error, Pipeline, Task, TaskRef}; +use error_stack::{IntoReport, ResultExt}; +use itertools::Itertools; + +/// Default thread count to use if we aren't able to determine +/// the number of cores. +const DEFAULT_THREAD_COUNT: usize = 8; + +/// Number of slots each thread should have in it's local task queue. +const LOCAL_QUEUE_SIZE: u16 = 32; + +#[derive(Debug)] +pub struct WorkerPool { + query_id: String, + injector: Injector, + handles: Vec>>, + /// A vector of the pipelines we created. 
+ pipelines: Vec>, +} + +impl WorkerPool { + pub fn start(query_id: String) -> error_stack::Result { + let core_ids = core_affinity::get_core_ids(); + let threads = core_ids + .as_ref() + .map(Vec::len) + .unwrap_or(DEFAULT_THREAD_COUNT); + + let (injector, workers) = Injector::create(threads, LOCAL_QUEUE_SIZE); + + let core_ids = core_ids + .into_iter() + .flatten() + .map(Some) + .chain(std::iter::repeat(None)); + let handles = workers + .into_iter() + .zip(core_ids) + .enumerate() + .map(|(index, (worker, core_id))| { + // Spawn the worker thread. + let span = tracing::info_span!("compute", query_id, index); + std::thread::Builder::new() + .name(format!("compute-{index}")) + .spawn(move || { + let _enter = span.enter(); + + // Set the core affinity, if possible, so this thread always + // executes on the same core. + if let Some(core_id) = core_id { + if core_affinity::set_for_current(core_id) { + tracing::info!( + "Set core affinity for thread {index} to {core_id:?}" + ); + } else { + tracing::info!( + "Failed to set core affinity for thread {index} to {core_id:?}" + ); + } + } else { + tracing::info!("Setting core affinity not supported"); + }; + + // Run the worker + worker.work_loop() + }) + .into_report() + .change_context(Error::SpawnWorker) + }) + .try_collect()?; + + let scheduler = Self { + query_id, + injector, + handles, + pipelines: vec![], + }; + Ok(scheduler) + } + + /// Return the global injector queue. + pub fn injector(&self) -> &Injector { + &self.injector + } + + /// Adds the pipeline to the scheduler and allocates tasks for executing it. + /// + /// `partitions` determines the number of task partitions to allocate. + pub fn add_pipeline(&mut self, partitions: usize, pipeline: T) -> Arc + where + T: Pipeline + 'static, + { + let index = self.pipelines.len(); + let name = std::any::type_name::(); + + // `new_cyclic` provides a `Weak` reference to the pipeline before it is + // created. This allows us to create tasks that reference the pipeline + // (via weak references) and pass those tasks to the pipeline. + let pipeline: Arc = Arc::new_cyclic(move |weak| { + let tasks = (0..partitions) + .map(|partition| -> TaskRef { + let weak: std::sync::Weak = weak.clone(); + let task = Task::new(index, name, weak, partition.into()); + Arc::new(task) + }) + .collect(); + + // We can't create the pipeline here because creating it may have produced errors, + // and `new_cyclic` doesn't support that. So we instead provide the tasks after + // creation, using the infallible `initialize` method. 
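// Editor's note: a minimal standalone illustration of the `Arc::new_cyclic`
// pattern used here (std only; `Node` is a hypothetical stand-in for a
// pipeline holding weak references to its own tasks):
//
//     struct Node { tasks: Vec<std::sync::Weak<Node>> }
//
//     let node = std::sync::Arc::new_cyclic(|weak| Node {
//         tasks: (0..2).map(|_| weak.clone()).collect(),
//     });
//     assert_eq!(std::sync::Arc::weak_count(&node), 2);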
+ let mut pipeline = pipeline; + pipeline.initialize(tasks); + pipeline + }); + let pipeline: Arc = pipeline; + self.pipelines.push(pipeline.clone()); + pipeline + } + + pub fn stop(self) -> error_stack::Result<(), Error> { + tracing::info!(self.query_id, "Waiting for completion of query"); + for handle in self.handles { + match handle.join() { + Ok(worker_result) => worker_result?, + Err(_) => { + error_stack::bail!(Error::WorkerPanicked) + } + } + } + + Ok(()) + } +} + +#[derive(derive_more::Display, Debug)] +#[display(fmt = "error creating pipeline '{_0}'")] +pub struct CreateError(&'static str); + +impl error_stack::Context for CreateError {} diff --git a/crates/sparrow-session/Cargo.toml b/crates/sparrow-session/Cargo.toml new file mode 100644 index 000000000..fde919198 --- /dev/null +++ b/crates/sparrow-session/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "sparrow-session" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +publish = false +description = """ +The Sparrow session builder. +""" + +[dependencies] +arrow-array.workspace = true +arrow-schema.workspace = true +arrow-select.workspace = true +derive_more.workspace = true +error-stack.workspace = true +futures.workspace = true +itertools.workspace = true +smallvec.workspace = true +sparrow-api = { path = "../sparrow-api" } +sparrow-compiler = { path = "../sparrow-compiler" } +sparrow-merge = { path = "../sparrow-merge" } +sparrow-runtime = { path = "../sparrow-runtime" } +sparrow-syntax = { path = "../sparrow-syntax" } +sparrow-instructions = { path = "../sparrow-instructions" } +static_init.workspace = true +tokio.workspace = true +tokio-stream.workspace = true +uuid.workspace = true + +[dev-dependencies] + +[lib] +bench = false +doctest = false diff --git a/crates/sparrow-session/src/error.rs b/crates/sparrow-session/src/error.rs new file mode 100644 index 000000000..5589d2d8e --- /dev/null +++ b/crates/sparrow-session/src/error.rs @@ -0,0 +1,31 @@ +use itertools::Itertools; +use sparrow_compiler::NearestMatches; + +#[derive(derive_more::Display, Debug)] +pub enum Error { + #[display(fmt = "failed to create table '{name}'")] + CreateTable { name: String }, + #[display(fmt = "failed to encode schema for table '{_0}'")] + SchemaForTable(String), + #[display(fmt = "invalid expression")] + Invalid, + #[display(fmt = "no function named '{name}'; nearest matches are: {nearest}")] + NoSuchFunction { + name: String, + nearest: NearestMatches, + }, + #[display(fmt = "{}", "_0.iter().join(\"\n\")")] + Errors(Vec), + #[display(fmt = "failed to prepare batch")] + Prepare, + #[display(fmt = "internal error")] + Internal, + #[display(fmt = "compile query")] + Compile, + #[display(fmt = "execute query")] + Execute, + #[display(fmt = "execution failed")] + ExecutionFailed, +} + +impl error_stack::Context for Error {} diff --git a/crates/sparrow-session/src/execution.rs b/crates/sparrow-session/src/execution.rs new file mode 100644 index 000000000..0933d64ee --- /dev/null +++ b/crates/sparrow-session/src/execution.rs @@ -0,0 +1,133 @@ +use arrow_array::RecordBatch; +use futures::future::BoxFuture; +use futures::stream::BoxStream; +use futures::{StreamExt, TryStreamExt}; +use sparrow_api::kaskada::v1alpha::ExecuteResponse; + +use crate::Error; + +pub struct Execution { + /// Tokio runtme managing this execution. + rt: tokio::runtime::Runtime, + /// Channel to receive output on. + output: tokio_stream::wrappers::ReceiverStream, + /// Future which resolves to the first error or None. 
+ status: Status, + /// Stop signal. Send `true` to stop execution. + stop_signal_rx: tokio::sync::watch::Sender, +} + +enum Status { + Running(BoxFuture<'static, error_stack::Result<(), Error>>), + Failed, + Completed, +} + +impl Execution { + pub(super) fn new( + rt: tokio::runtime::Runtime, + output_rx: tokio::sync::mpsc::Receiver, + progress: BoxStream<'static, error_stack::Result>, + stop_signal_rx: tokio::sync::watch::Sender, + ) -> Self { + let output = tokio_stream::wrappers::ReceiverStream::new(output_rx); + let status = Status::Running(Box::pin(async move { + let mut progress = progress; + while (progress.try_next().await?).is_some() {} + Ok(()) + })); + + Self { + rt, + output, + status, + stop_signal_rx, + } + } + + /// Check the status future. + /// + /// If it has previously completed (successfully or with error) returns + /// accordingly. Otherwise, check to see if the future is ready, and update + /// status (and return) accordingly. + fn is_done(&mut self) -> error_stack::Result<(), Error> { + let result = match &mut self.status { + Status::Running(future) => { + // Based on the implementation of `FutureExt::now_or_never`: + let noop_waker = futures::task::noop_waker(); + let mut cx = std::task::Context::from_waker(&noop_waker); + + match future.as_mut().poll(&mut cx) { + std::task::Poll::Ready(x) => x, + _ => return Ok(()), + } + } + Status::Failed => error_stack::bail!(Error::ExecutionFailed), + Status::Completed => return Ok(()), + }; + + match result { + Ok(_) => { + self.status = Status::Completed; + Ok(()) + } + Err(e) => { + self.status = Status::Failed; + Err(e) + } + } + } + + /// Send the stop signal. + /// + /// This method does *not* wait for all batches to be processed. + pub fn stop(&mut self) { + self.stop_signal_rx.send_if_modified(|stop| { + *stop = true; + true + }); + } + + pub async fn next(&mut self) -> error_stack::Result, Error> { + self.is_done()?; + Ok(self.output.next().await) + } + + pub fn next_blocking(&mut self) -> error_stack::Result, Error> { + self.is_done()?; + Ok(self.rt.block_on(self.output.next())) + } + + pub async fn collect_all(self) -> error_stack::Result, Error> { + // TODO: For large outputs, we likely need to drain the output while waiting for the future. + match self.status { + Status::Running(future) => future.await?, + Status::Failed => error_stack::bail!(Error::ExecutionFailed), + _ => {} + }; + + Ok(self.output.collect().await) + } + + pub fn collect_all_blocking(self) -> error_stack::Result, Error> { + // TODO: For large outputs, we likely need to drain the output while waiting for the future. 
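// Editor's note: one hedged way to address this TODO (illustrative): poll the
// status future and the output stream concurrently, collecting batches as they
// arrive rather than buffering them behind the finished future, e.g.
//
//     let (result, batches) = self
//         .rt
//         .block_on(async { tokio::join!(future, self.output.collect::<Vec<_>>()) });
//     result?;
//     Ok(batches)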
+ match self.status { + Status::Running(future) => self.rt.block_on(future)?, + Status::Failed => error_stack::bail!(Error::ExecutionFailed), + _ => {} + }; + + Ok(self.rt.block_on(self.output.collect())) + } +} + +#[cfg(test)] +mod tests { + use crate::Execution; + + #[test] + fn test_send() { + fn assert_send() {} + assert_send::(); + } +} diff --git a/crates/sparrow-session/src/expr.rs b/crates/sparrow-session/src/expr.rs new file mode 100644 index 000000000..0318e6e88 --- /dev/null +++ b/crates/sparrow-session/src/expr.rs @@ -0,0 +1,32 @@ +use arrow_schema::DataType; +use sparrow_compiler::AstDfgRef; +use sparrow_syntax::FenlType; + +#[derive(Clone, Debug)] +pub struct Expr(pub(crate) AstDfgRef); + +impl Expr { + pub fn data_type(&self) -> Option<&DataType> { + match self.0.value_type() { + FenlType::Concrete(data_type) => Some(data_type), + _ => None, + } + } + + pub fn is_continuous(&self) -> bool { + self.0.time_domain().is_continuous() + } + + pub fn grouping(&self) -> Option { + self.0.grouping().map(|grouping| grouping.to_string()) + } +} + +pub enum Literal { + Null, + Bool(bool), + String(String), + Int64(i64), + UInt64(u64), + Float64(f64), +} diff --git a/crates/sparrow-session/src/lib.rs b/crates/sparrow-session/src/lib.rs new file mode 100644 index 000000000..bbc72adef --- /dev/null +++ b/crates/sparrow-session/src/lib.rs @@ -0,0 +1,11 @@ +mod error; +mod execution; +mod expr; +mod session; +mod table; + +pub use error::Error; +pub use execution::Execution; +pub use expr::{Expr, Literal}; +pub use session::{ExecutionOptions, Session}; +pub use table::Table; diff --git a/crates/sparrow-session/src/session.rs b/crates/sparrow-session/src/session.rs new file mode 100644 index 000000000..77c5f3640 --- /dev/null +++ b/crates/sparrow-session/src/session.rs @@ -0,0 +1,489 @@ +use std::borrow::Cow; +use std::collections::HashMap; +use std::sync::Arc; + +use arrow_schema::SchemaRef; +use error_stack::{IntoReport, IntoReportCompat, ResultExt}; +use futures::{StreamExt, TryStreamExt}; +use itertools::Itertools; +use sparrow_api::kaskada::v1alpha::execute_request::Limits; +use sparrow_api::kaskada::v1alpha::{ + ComputeTable, FeatureSet, PerEntityBehavior, TableConfig, TableMetadata, +}; +use sparrow_compiler::{AstDfgRef, CompilerOptions, DataContext, Dfg, DiagnosticCollector}; +use sparrow_instructions::{GroupId, Udf}; +use sparrow_runtime::execute::output::Destination; +use sparrow_runtime::key_hash_inverse::ThreadSafeKeyHashInverse; +use sparrow_syntax::{ExprOp, FenlType, LiteralValue, Located, Location, Resolved}; +use uuid::Uuid; + +use crate::execution::Execution; +use crate::{Error, Expr, Literal, Table}; + +#[derive(Default)] +pub struct Session { + data_context: DataContext, + dfg: Dfg, + key_hash_inverse: HashMap>, +} + +#[derive(Default)] +pub struct ExecutionOptions { + /// The maximum number of rows to return. + pub row_limit: Option, + /// The maximum number of rows to return in a single batch. + pub max_batch_size: Option, + /// Whether to run execute as a materialization or not. + pub materialize: bool, +} + +/// Adds a table to the session. 
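// Editor's note: a hedged end-to-end usage sketch of this session API
// (argument values are hypothetical, and `table_expr` stands in for the
// table's expression, whose accessor is not shown in this diff):
//
//     let mut session = Session::default();
//     let _table = session.add_table(
//         "purchases", schema, "time", None, "user", Some("user"), Some("ms"))?;
//     let name = session.add_literal(Literal::String("amount".to_owned()))?;
//     let amount = session.add_expr("fieldref", vec![table_expr, name])?;
//     let total = session.add_expr("sum", vec![amount])?;
//     let execution = session.execute(&total, ExecutionOptions::default())?;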
+impl Session {
+    pub fn add_literal(&mut self, literal: Literal) -> error_stack::Result<Expr, Error> {
+        let literal_value = match literal {
+            Literal::Null => LiteralValue::Null,
+            Literal::Bool(true) => LiteralValue::True,
+            Literal::Bool(false) => LiteralValue::False,
+            Literal::String(s) => LiteralValue::String(s),
+            Literal::Int64(n) => LiteralValue::Number(n.to_string()),
+            Literal::UInt64(n) => LiteralValue::Number(n.to_string()),
+            Literal::Float64(n) => LiteralValue::Number(n.to_string()),
+        };
+        self.add_to_dfg(
+            ExprOp::Literal(Located::builder(literal_value)),
+            Resolved::default(),
+        )
+        .map(Expr)
+    }
+
+    #[allow(clippy::too_many_arguments)]
+    pub fn add_table(
+        &mut self,
+        name: &str,
+        schema: SchemaRef,
+        time_column_name: &str,
+        subsort_column_name: Option<&str>,
+        key_column_name: &str,
+        grouping_name: Option<&str>,
+        time_unit: Option<&str>,
+    ) -> error_stack::Result<Table, Error> {
+        let uuid = Uuid::new_v4();
+        let schema_proto = sparrow_api::kaskada::v1alpha::Schema::try_from(schema.as_ref())
+            .into_report()
+            .change_context_lazy(|| Error::SchemaForTable(name.to_owned()))?;
+        let table = ComputeTable {
+            config: Some(TableConfig {
+                name: name.to_owned(),
+                uuid: uuid.to_string(),
+                time_column_name: time_column_name.to_owned(),
+                subsort_column_name: subsort_column_name.map(|s| s.to_owned()),
+                group_column_name: key_column_name.to_owned(),
+                grouping: grouping_name.unwrap_or("").to_owned(),
+                source: None,
+            }),
+            metadata: Some(TableMetadata {
+                schema: Some(schema_proto),
+                file_count: 0,
+            }),
+            file_sets: vec![],
+        };
+
+        let (key_column, key_field) = schema
+            .column_with_name(key_column_name)
+            .expect("expected key column");
+
+        let table_info = self
+            .data_context
+            .add_table(table)
+            .into_report()
+            .change_context_lazy(|| Error::CreateTable {
+                name: name.to_owned(),
+            })?;
+
+        let dfg_node = table_info
+            .dfg_node(&mut self.dfg)
+            .into_report()
+            .change_context(Error::CreateTable {
+                name: name.to_owned(),
+            })?;
+
+        let expr = Expr(dfg_node);
+
+        let key_hash_inverse = self
+            .key_hash_inverse
+            .entry(table_info.group_id())
+            .or_insert_with(|| {
+                Arc::new(ThreadSafeKeyHashInverse::from_data_type(
+                    key_field.data_type(),
+                ))
+            })
+            .clone();
+
+        Table::new(table_info, key_hash_inverse, key_column, expr, time_unit)
+    }
+
+    pub fn add_cast(
+        &mut self,
+        arg: Expr,
+        data_type: arrow_schema::DataType,
+    ) -> error_stack::Result<Expr, Error> {
+        let op = ExprOp::Cast(
+            Located::builder(FenlType::Concrete(data_type)),
+            Location::builder(),
+        );
+        let args = Resolved::new(
+            Cow::Borrowed(&*CAST_ARGUMENTS),
+            smallvec::smallvec![Located::builder(arg.0)],
+            false,
+        );
+        self.add_to_dfg(op, args).map(Expr)
+    }
+
+    pub fn add_expr(
+        &mut self,
+        function: &str,
+        args: Vec<Expr>,
+    ) -> error_stack::Result<Expr, Error> {
+        let (op, args) = match function {
+            "fieldref" => {
+                assert_eq!(args.len(), 2);
+                let (base, name) = args.into_iter().collect_tuple().unwrap();
+
+                let name = self
+                    .dfg
+                    .string_literal(name.0.value())
+                    .expect("literal name");
+
+                let op = ExprOp::FieldRef(Located::builder(name.to_owned()), Location::builder());
+                let args = Resolved::new(
+                    Cow::Borrowed(&*FIELD_REF_ARGUMENTS),
+                    smallvec::smallvec![Located::builder(base.0)],
+                    false,
+                );
+                (op, args)
+            }
+            "record" => {
+                assert!(args.len() % 2 == 0);
+                let (names, values): (Vec<_>, _) = args
+                    .into_iter()
+                    .map(|e| Located::builder(e.0))
+                    .tuples()
+                    .unzip();
+
+                let names: smallvec::SmallVec<_> = names
+                    .into_iter()
+                    .map(|name| {
+                        name.transform(|name| {
+                            self.dfg
+                                .string_literal(name.value())
+                                .expect("literal name")
+                                .to_owned()
+                        })
+                    })
+                    .collect();
+
+                let args = Resolved::new(Cow::Owned(names.to_vec()), values, false);
+                let op = ExprOp::Record(names, Location::builder());
+                (op, args)
+            }
+            "remove_fields" => {
+                let values = args.into_iter().map(|e| Located::builder(e.0)).collect();
+                let op = ExprOp::RemoveFields(Location::builder());
+                let args = Resolved::new(
+                    Cow::Borrowed(&*SELECT_REMOVE_FIELDS_ARGUMENTS),
+                    values,
+                    true,
+                );
+                (op, args)
+            }
+            "select_fields" => {
+                let values = args.into_iter().map(|e| Located::builder(e.0)).collect();
+                let op = ExprOp::SelectFields(Location::builder());
+                let args = Resolved::new(
+                    Cow::Borrowed(&*SELECT_REMOVE_FIELDS_ARGUMENTS),
+                    values,
+                    true,
+                );
+                (op, args)
+            }
+            "extend_record" => {
+                let values = args.into_iter().map(|e| Located::builder(e.0)).collect();
+                let op = ExprOp::ExtendRecord(Location::builder());
+                let args =
+                    Resolved::new(Cow::Borrowed(&*RECORD_EXTENSION_ARGUMENTS), values, false);
+                (op, args)
+            }
+            function => {
+                let op = ExprOp::Call(Located::builder(function.to_owned()));
+
+                let function = match sparrow_compiler::get_function(function) {
+                    Ok(function) => function,
+                    Err(matches) => {
+                        error_stack::bail!(Error::NoSuchFunction {
+                            name: function.to_owned(),
+                            nearest: matches.map(|s| s.to_owned())
+                        });
+                    }
+                };
+
+                // TODO: Make this a proper error (not an assertion).
+                let signature = function.internal_signature();
+                signature.assert_valid_argument_count(args.len());
+
+                let has_vararg =
+                    signature.parameters().has_vararg && args.len() > signature.arg_names().len();
+                let args = Resolved::new(
+                    signature.arg_names().into(),
+                    args.into_iter()
+                        .map(|arg| Located::builder(arg.0))
+                        .collect(),
+                    has_vararg,
+                );
+                (op, args)
+            }
+        };
+
+        self.add_to_dfg(op, args).map(Expr)
+    }
+
+    fn add_to_dfg(
+        &mut self,
+        op: ExprOp,
+        args: Resolved<Located<AstDfgRef>>,
+    ) -> error_stack::Result<AstDfgRef, Error> {
+        let feature_set = FeatureSet::default();
+        let mut diagnostics = DiagnosticCollector::new(&feature_set);
+        let result = sparrow_compiler::add_to_dfg(
+            &mut self.data_context,
+            &mut self.dfg,
+            &mut diagnostics,
+            &op,
+            args,
+            None,
+        )
+        .into_report()
+        .change_context(Error::Invalid)?;
+
+        if diagnostics.num_errors() > 0 {
+            let errors = diagnostics
+                .finish()
+                .into_iter()
+                .filter(|diagnostic| diagnostic.is_error())
+                .map(|diagnostic| diagnostic.formatted)
+                .collect();
+            Err(Error::Errors(errors))?
+        } else {
+            Ok(result)
+        }
+    }
+
+    /// The [Expr] will call this to add a user-defined function to the DFG directly.
+    ///
+    /// This bypasses much of the plumbing of the [ExprOp] required due to our construction
+    /// of the AST.
+    #[allow(unused)]
+    fn add_udf_to_dfg(
+        &mut self,
+        udf: Arc<dyn Udf>,
+        args: Vec<Expr>,
+    ) -> error_stack::Result<AstDfgRef, Error> {
+        let signature = udf.signature();
+        let arg_names = signature.arg_names().to_owned();
+        signature.assert_valid_argument_count(args.len());
+
+        let has_vararg =
+            signature.parameters().has_vararg && args.len() > signature.arg_names().len();
+        let args = Resolved::new(
+            arg_names.into(),
+            args.into_iter()
+                .map(|arg| Located::builder(arg.0))
+                .collect(),
+            has_vararg,
+        );
+        let feature_set = FeatureSet::default();
+        let mut diagnostics = DiagnosticCollector::new(&feature_set);
+
+        let location = Located::builder("udf".to_owned());
+        let result = sparrow_compiler::add_udf_to_dfg(
+            &location,
+            udf.clone(),
+            &mut self.dfg,
+            args,
+            &mut self.data_context,
+            &mut diagnostics,
+        )
+        .into_report()
+        .change_context(Error::Invalid)?;
+
+        if diagnostics.num_errors() > 0 {
+            let errors = diagnostics
+                .finish()
+                .into_iter()
+                .filter(|diagnostic| diagnostic.is_error())
+                .map(|diagnostic| diagnostic.formatted)
+                .collect();
+            Err(Error::Errors(errors))?
+        } else {
+            Ok(result)
+        }
+    }
+
+    pub fn execute(
+        &self,
+        expr: &Expr,
+        options: ExecutionOptions,
+    ) -> error_stack::Result<Execution, Error> {
+        // TODO: Decorations?
+        let group_id = expr
+            .0
+            .grouping()
+            .expect("query to be grouped (non-literal)");
+        let primary_group_info = self
+            .data_context
+            .group_info(group_id)
+            .expect("missing group info");
+        let primary_grouping = primary_group_info.name().to_owned();
+        let primary_grouping_key_type = primary_group_info.key_type();
+
+        // First, extract the necessary subset of the DFG as an expression.
+        // This will allow us to operate without mutating things.
+        let expr = self.dfg.extract_simplest(expr.0.value());
+        let expr = expr
+            .simplify(&CompilerOptions {
+                ..CompilerOptions::default()
+            })
+            .into_report()
+            .change_context(Error::Compile)?;
+        let expr = sparrow_compiler::remove_useless_transforms(expr)
+            .into_report()
+            .change_context(Error::Compile)?;
+
+        // TODO: Run the egraph simplifications.
+        // TODO: Incremental?
+        // TODO: Slicing?
+        let plan = sparrow_compiler::plan::extract_plan_proto(
+            &self.data_context,
+            expr,
+            // TODO: Configure per-entity behavior.
+            PerEntityBehavior::Final,
+            primary_grouping,
+            primary_grouping_key_type,
+        )
+        .into_report()
+        .change_context(Error::Compile)?;
+
+        // Switch to the Tokio async pool. This seems gross.
+        // Create the runtime.
+        //
+        // TODO: Figure out how to asynchronously pass results back to Python?
+        let rt = tokio::runtime::Runtime::new()
+            .into_report()
+            .change_context(Error::Execute)?;
+        let (output_tx, output_rx) = tokio::sync::mpsc::channel(10);
+
+        let destination = Destination::Channel(output_tx);
+        let data_context = self.data_context.clone();
+
+        let (stop_signal_tx, stop_signal_rx) = tokio::sync::watch::channel(false);
+        let mut options = options.to_sparrow_options();
+        options.stop_signal_rx = Some(stop_signal_rx);
+
+        let key_hash_inverse = self
+            .key_hash_inverse
+            .get(&group_id)
+            .cloned()
+            .unwrap_or_else(|| {
+                Arc::new(ThreadSafeKeyHashInverse::from_data_type(
+                    primary_grouping_key_type,
+                ))
+            });
+
+        // Hacky: reuse the existing execution logic. This does weird things with
+        // downloading checkpoints, etc.
+        let progress = rt
+            .block_on(sparrow_runtime::execute::execute_new(
+                plan,
+                destination,
+                data_context,
+                options,
+                Some(key_hash_inverse),
+            ))
+            .change_context(Error::Execute)?
+            .map_err(|e| e.change_context(Error::Execute))
+            .boxed();
+
+        Ok(Execution::new(rt, output_rx, progress, stop_signal_tx))
+    }
+}
+
+#[static_init::dynamic]
+pub(crate) static FIELD_REF_ARGUMENTS: [Located<String>; 1] = [Located::internal_string("record")];
+
+#[static_init::dynamic]
+static SELECT_REMOVE_FIELDS_ARGUMENTS: [Located<String>; 2] = [
+    Located::internal_string("record"),
+    Located::internal_string("fields"),
+];
+
+#[static_init::dynamic]
+static RECORD_EXTENSION_ARGUMENTS: [Located<String>; 2] = [
+    Located::internal_string("extension"),
+    Located::internal_string("base"),
+];
+
+#[static_init::dynamic]
+static CAST_ARGUMENTS: [Located<String>; 1] = [Located::internal_string("input")];
+
+impl ExecutionOptions {
+    fn to_sparrow_options(&self) -> sparrow_runtime::execute::ExecutionOptions {
+        let mut options = sparrow_runtime::execute::ExecutionOptions {
+            max_batch_size: self.max_batch_size,
+            materialize: self.materialize,
+            ..Default::default()
+        };
+
+        if let Some(row_limit) = self.row_limit {
+            options.limits = Some(Limits {
+                preview_rows: row_limit as i64,
+            });
+        }
+
+        options
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::sync::Arc;
+
+    use arrow_schema::{DataType, Field, Schema, TimeUnit};
+
+    use super::*;
+
+    #[test]
+    fn session_compilation_test() {
+        let mut session = Session::default();
+
+        let schema = Arc::new(Schema::new(vec![
+            Field::new(
+                "time",
+                DataType::Timestamp(TimeUnit::Nanosecond, None),
+                false,
+            ),
+            Field::new("key", DataType::UInt64, false),
+            Field::new("a", DataType::UInt64, true),
+            Field::new("b", DataType::Int64, true),
+        ]));
+        let table = session
+            .add_table("table", schema, "time", None, "key", Some("user"), None)
+            .unwrap();
+
+        let field_name = session
+            .add_literal(Literal::String("a".to_owned()))
+            .unwrap();
+        let field_ref = session
+            .add_expr("fieldref", vec![table.expr.clone(), field_name])
+            .unwrap();
+
+        assert_eq!(field_ref.data_type(), Some(&DataType::UInt64));
+    }
+}
diff --git a/crates/sparrow-session/src/table.rs b/crates/sparrow-session/src/table.rs
new file mode 100644
index 000000000..fb7851b0a
--- /dev/null
+++ b/crates/sparrow-session/src/table.rs
@@ -0,0 +1,96 @@
+use std::sync::Arc;
+
+use arrow_array::cast::AsArray;
+use arrow_array::types::ArrowPrimitiveType;
+use arrow_array::RecordBatch;
+use arrow_schema::{DataType, Field, Fields, Schema, SchemaRef};
+use error_stack::ResultExt;
+use sparrow_compiler::TableInfo;
+use sparrow_merge::InMemoryBatches;
+use sparrow_runtime::key_hash_inverse::ThreadSafeKeyHashInverse;
+use sparrow_runtime::preparer::Preparer;
+
+use crate::{Error, Expr};
+
+pub struct Table {
+    pub expr: Expr,
+    preparer: Preparer,
+    in_memory_batches: Arc<InMemoryBatches>,
+    key_column: usize,
+    key_hash_inverse: Arc<ThreadSafeKeyHashInverse>,
+}
+
+impl Table {
+    pub(crate) fn new(
+        table_info: &mut TableInfo,
+        key_hash_inverse: Arc<ThreadSafeKeyHashInverse>,
+        key_column: usize,
+        expr: Expr,
+        time_unit: Option<&str>,
+    ) -> error_stack::Result<Self, Error> {
+        let prepared_fields: Fields = KEY_FIELDS
+            .iter()
+            .chain(table_info.schema().fields.iter())
+            .cloned()
+            .collect();
+        let prepared_schema = Arc::new(Schema::new(prepared_fields));
+        let prepare_hash = 0;
+
+        assert!(table_info.in_memory.is_none());
+        let in_memory_batches = Arc::new(InMemoryBatches::new(prepared_schema.clone()));
+        table_info.in_memory = Some(in_memory_batches.clone());
+
+        let preparer = Preparer::new(
+            table_info.config().time_column_name.clone(),
+            table_info.config().subsort_column_name.clone(),
+            table_info.config().group_column_name.clone(),
+            prepared_schema,
+            prepare_hash,
+            time_unit,
+        )
+
.change_context_lazy(|| Error::CreateTable { + name: table_info.name().to_owned(), + })?; + + Ok(Self { + expr, + preparer, + in_memory_batches, + key_hash_inverse, + key_column: key_column + KEY_FIELDS.len(), + }) + } + + pub fn schema(&self) -> SchemaRef { + self.preparer.schema() + } + + pub fn add_data(&mut self, batch: RecordBatch) -> error_stack::Result<(), Error> { + let prepared = self + .preparer + .prepare_batch(batch) + .change_context(Error::Prepare)?; + + let key_hashes = prepared.column(2).as_primitive(); + let keys = prepared.column(self.key_column); + self.key_hash_inverse + .blocking_add(keys.as_ref(), key_hashes) + .change_context(Error::Prepare)?; + + self.in_memory_batches + .add_batch(prepared) + .change_context(Error::Prepare)?; + Ok(()) + } +} + +#[static_init::dynamic] +static KEY_FIELDS: Vec = vec![ + Arc::new(Field::new( + "_time", + arrow_array::types::TimestampNanosecondType::DATA_TYPE, + false, + )), + Arc::new(Field::new("_subsort", DataType::UInt64, false)), + Arc::new(Field::new("_key_hash", DataType::UInt64, false)), +]; diff --git a/crates/sparrow-syntax/src/parser.rs b/crates/sparrow-syntax/src/parser.rs index 0ff1996d5..d9a2ccb57 100644 --- a/crates/sparrow-syntax/src/parser.rs +++ b/crates/sparrow-syntax/src/parser.rs @@ -5,7 +5,7 @@ use lalrpop_util::lalrpop_mod; -use crate::{ExprRef, FeatureSetPart, Signature}; +use crate::{ExprRef, FeatureSetPart, FenlType, Signature}; lalrpop_mod!( #[allow(clippy::all)] @@ -63,6 +63,14 @@ pub(crate) fn try_parse_arguments( }) } +pub(crate) fn try_parse_type( + part_id: FeatureSetPart, + input: &str, +) -> Result> { + try_parse(input, |errors, lexer| { + grammar::TypeParser::new().parse(part_id, errors, lexer) + }) +} #[inline] fn try_parse( input: &str, diff --git a/crates/sparrow-syntax/src/parser/grammar.lalrpop b/crates/sparrow-syntax/src/parser/grammar.lalrpop index ab89bd1d8..db5a3053e 100644 --- a/crates/sparrow-syntax/src/parser/grammar.lalrpop +++ b/crates/sparrow-syntax/src/parser/grammar.lalrpop @@ -264,8 +264,18 @@ TypeClass: TypeClass = { } } -Type: FenlType = { - => FenlType::from_str(&name).unwrap_or_else(|e| { +pub(crate) Type: FenlType = { + "<" > ">" => { + if name == "list" && types.len() == 1 { + FenlType::Collection(Collection::List, types.to_vec()).normalize() + } else if name == "map" && types.len() == 2 { + FenlType::Collection(Collection::Map, types.to_vec()).normalize() + } else { + errors.push(ParseError::User{ error: (l, format!("Invalid Fenl Type"), r)}); + FenlType::Error + } + }, + => FenlType::from_str(&name).unwrap_or_else(|e| { errors.push(ParseError::User{ error: (l, format!("Invalid Fenl Type '{}'", name), r)}); e }), @@ -275,22 +285,6 @@ Type: FenlType = { } } -ParameterWithTypeVars: String = { - => name.to_string(), - => { - let mut result = String::new(); - result.push_str(name); - result.push('<'); - result.push_str(&type_vars.join(",")); - result.push('>'); - result - }, -} - -TypeVariables: ArgVec<&'input str> = { - "<" > ">" => args, -} - Located: Located = { => { Located::new(v, Location::new(part_id, l, r)) diff --git a/crates/sparrow-syntax/src/syntax/arguments.rs b/crates/sparrow-syntax/src/syntax/arguments.rs index 612481e50..8d1ec1929 100644 --- a/crates/sparrow-syntax/src/syntax/arguments.rs +++ b/crates/sparrow-syntax/src/syntax/arguments.rs @@ -17,7 +17,7 @@ use crate::{FenlType, Located, Location}; /// The parameter names for a single method must be unique. /// /// Required parameters must appear before optional parameters. 
-#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Hash)] #[cfg_attr(test, derive(serde::Serialize))] pub struct Parameters { /// The names of each parameter. @@ -170,7 +170,7 @@ impl Parameters { pub type ArgVec = SmallVec<[T; 2]>; /// Represents a single argument, which is either positional or keyword. -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(test, derive(serde::Serialize))] pub enum Argument { /// An argument passed by position -- eg., `5` in `foo(5, x = 6)`. @@ -210,7 +210,7 @@ impl Argument { /// /// Before being used the arguments must be resolved against the parameters of /// the corresponding function. -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[cfg_attr(test, derive(serde::Serialize))] #[repr(transparent)] pub struct Arguments(ArgVec>); @@ -462,6 +462,16 @@ pub struct Resolved { pub has_vararg: bool, } +impl Default for Resolved { + fn default() -> Self { + Self { + names: Default::default(), + values: Default::default(), + has_vararg: Default::default(), + } + } +} + impl Resolved { pub fn empty() -> Self { Self { diff --git a/crates/sparrow-syntax/src/syntax/expr.rs b/crates/sparrow-syntax/src/syntax/expr.rs index bb5edbe94..91e51bcd2 100644 --- a/crates/sparrow-syntax/src/syntax/expr.rs +++ b/crates/sparrow-syntax/src/syntax/expr.rs @@ -8,7 +8,7 @@ use crate::parser::try_parse_expr; use crate::{ArgVec, Arguments, FenlType, LiteralValue, ParseErrors, Resolved}; /// Identifies a specific part of a feature set query. -#[derive(Clone, Copy, Debug, Eq, PartialEq)] +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] #[cfg_attr(test, derive(serde::Serialize))] pub enum FeatureSetPart { /// Internal definitions that don't reference contents of FeatureSet. @@ -25,12 +25,14 @@ pub enum FeatureSetPart { Formula(u32), /// The query. Query, + /// Code coming from python. + Builder, } /// The location of part of an expression in the original source. /// /// Contains the start and end position in bytes within the source. -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] #[cfg_attr(test, derive(serde::Serialize))] pub struct Location { /// The part of the feature set this location is in. @@ -46,6 +48,10 @@ impl Location { Self { part, start, end } } + pub fn builder() -> Self { + Location::new(FeatureSetPart::Builder, 0, "builder".len()) + } + pub fn internal_str(value: &'static str) -> Self { Self { part: FeatureSetPart::Internal(value), @@ -107,6 +113,13 @@ impl Located { Self { value, location } } + pub fn builder(value: T) -> Self { + Self { + value, + location: Location::builder(), + } + } + pub fn inner(&self) -> &T { &self.value } @@ -134,6 +147,13 @@ impl Located { } } + pub fn try_map(self, f: impl FnOnce(T) -> Result) -> Result, E> { + Ok(Located { + value: f(self.value)?, + location: self.location, + }) + } + pub fn with_value(&self, value: T2) -> Located { Located { value, @@ -200,7 +220,7 @@ impl std::hash::Hash for Located { /// can be passed around without copying. 
pub type ExprRef = Arc; -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] #[cfg_attr(test, derive(serde::Serialize))] pub struct Expr { op: ExprOp, @@ -236,7 +256,7 @@ impl ResolvedExpr { } } -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] #[cfg_attr(test, derive(serde::Serialize))] pub enum ExprOp { Literal(Located), diff --git a/crates/sparrow-syntax/src/syntax/fenl_type.rs b/crates/sparrow-syntax/src/syntax/fenl_type.rs index de2c70cfa..d6bc4eb0b 100644 --- a/crates/sparrow-syntax/src/syntax/fenl_type.rs +++ b/crates/sparrow-syntax/src/syntax/fenl_type.rs @@ -1,12 +1,13 @@ +use std::fmt::Display; use std::str::FromStr; -use std::{fmt::Display, sync::Arc}; +use std::sync::Arc; use arrow_schema::{DataType, Field, FieldRef, Fields, IntervalUnit, TimeUnit}; use itertools::Itertools; use serde::Serialize; use sparrow_arrow::scalar_value::timeunit_suffix; -use crate::TypeVariable; +use crate::{try_parse_type, FeatureSetPart, ParseErrors, TypeVariable}; /// A wrapper around an Arrow `DataType`. /// @@ -25,7 +26,7 @@ pub enum FenlType { /// e.g. (Collection::Map, [TypeVariable("K"), TypeVariable("V")]) /// /// TODO(https://github.com/kaskada-ai/kaskada/issues/494): Support FenlType - Collection(Collection, Vec), + Collection(Collection, Vec), /// A type for describing a windowing behavior. Window, /// A type for describing a string that will be interpreted @@ -38,7 +39,7 @@ pub enum FenlType { Error, } -#[derive(Clone, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Debug, PartialEq, Eq, Hash, Copy)] #[cfg_attr(test, derive(serde::Serialize))] pub enum Collection { List, @@ -82,6 +83,9 @@ impl<'a> std::fmt::Display for FormatDataType<'a> { write!(fmt, "{}", FormatStruct(fields)) } DataType::Date32 => fmt.write_str("date32"), + DataType::List(f) => { + write!(fmt, "list<{}>", FormatDataType(f.data_type())) + } DataType::Map(f, _) => match f.data_type() { DataType::Struct(fields) => { write!( @@ -243,57 +247,6 @@ impl FromStr for FenlType { "duration_ns" => Ok(DataType::Duration(TimeUnit::Nanosecond).into()), "window" => Ok(FenlType::Window), "json" => Ok(FenlType::Json), - // TODO(https://github.com/kaskada-ai/kaskada/issues/494): Support fenl types - // in collections - s if s.starts_with("list<") && s.ends_with('>') => { - let type_var = &s[5..s.len() - 1] - .split(',') - .map(|s| s.trim()) - .collect::>(); - - // One type var for a list - if type_var.len() != 1 { - return Err(FenlType::Error); - } - - match FenlType::from_str(type_var[0])? 
{ - FenlType::Concrete(dt) => { - let f = Field::new("item", dt, true); - Ok(DataType::List(Arc::new(f)).into()) - } - FenlType::TypeRef(type_var) => { - Ok(FenlType::Collection(Collection::List, vec![type_var])) - } - other => panic!("unexpected type: {:?}", other), - } - } - s if s.starts_with("map<") && s.ends_with('>') => { - let type_var = &s[4..s.len() - 1] - .split(',') - .map(|s| s.trim()) - .collect::>(); - - // Two type vars for a map - if type_var.len() != 2 { - return Err(FenlType::Error); - } - let key_type = FenlType::from_str(type_var[0])?; - let value_type = FenlType::from_str(type_var[1])?; - - match (key_type, value_type) { - (FenlType::Concrete(kt), FenlType::Concrete(vt)) => { - let f1 = Field::new("key", kt, true); - let f2 = Field::new("value", vt, true); - let s = DataType::Struct(Fields::from(vec![f1, f2])); - let f = Field::new("entries", s, true); - Ok(DataType::Map(Arc::new(f), false).into()) - } - (FenlType::TypeRef(ktv), FenlType::TypeRef(vtv)) => { - Ok(FenlType::Collection(Collection::Map, vec![ktv, vtv])) - } - (_, _) => unimplemented!("map with concrete and type variable mix"), - } - } s => Ok(FenlType::TypeRef(TypeVariable(s.to_owned()))), } } @@ -312,6 +265,80 @@ impl From<&Field> for FenlType { } impl FenlType { + /// Normalize concrete collection types. + pub fn normalize(self) -> Self { + match self { + FenlType::Collection(c, args) if args.iter().all(|t| t.is_concrete()) => { + match c { + Collection::List => { + // Note: the `name` and `nullability` here are the standard, and cannot be changed, + // or we will have schema mismatches later during execution. + // + // That said, there's no reason why a later arrow version can't change this behavior. + // TODO: Figure out how to pass user naming and nullability through inference. + let item = args.into_iter().exactly_one().unwrap(); + let item = item.take_arrow_type().unwrap(); + let field = Arc::new(Field::new("item", item, true)); + FenlType::Concrete(DataType::List(field)) + } + Collection::Map => { + assert!( + args.len() == 2, + "map must have two type arguments, was {args:?}" + ); + let (key, value) = args.into_iter().collect_tuple().unwrap(); + let key = key.take_arrow_type().unwrap(); + let value = value.take_arrow_type().unwrap(); + + // Note that the `name` and `nullability` are the standard, and cannot be changed, + // or we may have schema mismatches later during execution. + // + // That said, there's no reason why a later arrow version can't change this behavior. + // TODO: Figure out how to pass user naming and nullability through inference. 
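+                    // For example, under the conventions above, `map<i32, i64>`
+                    // normalizes to:
+                    //   Map(Struct { keys: Int32 (non-null), values: Int64 (nullable) }, unsorted)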
+ let key_field = Field::new("keys", key, false); + let value_field = Field::new("values", value, true); + + let fields = Fields::from(vec![key_field, value_field]); + let entries = DataType::Struct(fields); + let entries = Arc::new(Field::new("entries", entries, false)); + FenlType::Concrete(DataType::Map(entries, false)) + } + } + } + other => other, + } + } + + pub fn collection_args(&self, collection: &Collection) -> Option> { + match self { + FenlType::Collection(c, args) if c == collection => Some(args.clone()), + FenlType::Concrete(data_type) => match (collection, data_type) { + (Collection::List, DataType::List(field)) => { + Some(vec![FenlType::Concrete(field.data_type().clone())]) + } + (Collection::Map, DataType::Map(field, _)) => { + let DataType::Struct(fields) = field.data_type() else { + panic!("Map type has a struct type with key/value") + }; + Some(vec![ + FenlType::Concrete(fields[0].data_type().clone()), + FenlType::Concrete(fields[1].data_type().clone()), + ]) + } + _ => None, + }, + _ => None, + } + } + + pub fn is_concrete(&self) -> bool { + matches!(self, FenlType::Concrete(_)) + } + + pub fn try_from_str(part_id: FeatureSetPart, input: &str) -> Result> { + try_parse_type(part_id, input) + } + pub fn is_error(&self) -> bool { matches!(self, FenlType::Error) } @@ -345,3 +372,46 @@ impl FenlType { } } } + +#[cfg(test)] +mod tests { + use crate::{Collection, FeatureSetPart, FenlType}; + use arrow::datatypes::DataType; + + #[test] + fn test_parse() { + let parse = |input| { + let part_id = FeatureSetPart::Internal(input); + crate::parser::try_parse_type(part_id, input).unwrap() + }; + + let i32 = FenlType::Concrete(DataType::Int32); + let i64 = FenlType::Concrete(DataType::Int64); + let t = FenlType::TypeRef(crate::TypeVariable("T".to_owned())); + assert_eq!(parse("i32"), i32); + assert_eq!(parse("i64"), i64); + assert_eq!( + parse("map"), + FenlType::Collection(Collection::Map, vec![i32.clone(), i64.clone()]).normalize() + ); + assert_eq!( + parse("map"), + FenlType::Collection(Collection::Map, vec![t.clone(), i64.clone()]) + ); + assert_eq!( + parse("list>"), + FenlType::Collection( + Collection::List, + vec![FenlType::Collection(Collection::List, vec![i32]).normalize()] + ) + .normalize() + ); + assert_eq!( + parse("list>"), + FenlType::Collection( + Collection::List, + vec![FenlType::Collection(Collection::List, vec![t])] + ) + ); + } +} diff --git a/crates/sparrow-syntax/src/syntax/signature.rs b/crates/sparrow-syntax/src/syntax/signature.rs index 789b31da8..7083b5678 100644 --- a/crates/sparrow-syntax/src/syntax/signature.rs +++ b/crates/sparrow-syntax/src/syntax/signature.rs @@ -1,3 +1,4 @@ +use hashbrown::HashSet; use itertools::Itertools; use crate::parser::try_parse_signature; @@ -48,7 +49,7 @@ impl TypeParameter { } /// The signature of an operator or function. -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Hash)] #[cfg_attr(test, derive(serde::Serialize))] pub struct Signature { /// The name of the operator or function. 
@@ -65,6 +66,77 @@ pub struct Signature { result: FenlType, } +fn check_signature( + name: &str, + parameters: &Parameters, + type_parameters: &[TypeParameter], + result: &FenlType, +) -> anyhow::Result<()> { + fn visit_type<'a>(type_vars: &mut HashSet<&'a TypeVariable>, ty: &'a FenlType) { + match ty { + FenlType::TypeRef(type_var) => { + type_vars.insert(type_var); + } + FenlType::Collection(_, coll_types) => { + for type_var in coll_types { + visit_type(type_vars, type_var); + } + } + _ => {} + } + } + + // collect all the type variables and verify they are defined + let mut type_vars = HashSet::new(); + let defined: HashSet<_> = type_parameters.iter().map(|p| &p.name).collect(); + + for t in parameters.types() { + visit_type(&mut type_vars, t.inner()); + } + + // check that all type variables defined are used in the parameters. + // we do this before adding the result since we can't infer the type + // for a type variable used only in the return. + for tp in type_parameters { + anyhow::ensure!( + type_vars.contains(&&tp.name), + "Type variable '{:?}' is defined in the type parameters for signature '{}', + but is not used in the parameters", + tp.name, + name + ); + } + + visit_type(&mut type_vars, result); + + // check that all referenced type variables are defined + for type_var in type_vars { + anyhow::ensure!( + defined.contains(&type_var), + "Type variable '{:?}' is not defined in the type parameters for signature: '{:?}'", + type_var, + name + ); + } + + // check that no duplicates exist in the type_parameters + let duplicates: Vec<_> = type_parameters + .iter() + .map(|p| &p.name.0) + .duplicates() + .collect(); + if !duplicates.is_empty() { + anyhow::bail!( + "Duplicate type parameters: {} in signature for '{name}'", + duplicates + .iter() + .format_with(",", |elt, f| f(&format_args!("'{elt}'"))) + ); + }; + + Ok(()) +} + impl Signature { pub(crate) fn try_new( name: String, @@ -72,54 +144,7 @@ impl Signature { type_parameters: Vec, result: FenlType, ) -> anyhow::Result { - // collect all the type variables and verify they are defined - let mut type_vars = Vec::new(); - for t in parameters.types() { - match t.inner() { - FenlType::TypeRef(type_var) => { - verify_is_defined(type_var, &type_parameters, &name)?; - type_vars.push(type_var) - } - FenlType::Collection(_, coll_types) => { - for type_var in coll_types { - verify_is_defined(type_var, &type_parameters, &name)?; - type_vars.push(type_var); - } - } - _ => (), - } - } - if let FenlType::TypeRef(type_var) = &result { - verify_is_defined(type_var, &type_parameters, &name)?; - type_vars.push(type_var); - } - - // check that all type parameters defined are used in the parameters or result - for tp in &type_parameters { - anyhow::ensure!( - type_vars.iter().contains(&&tp.name), - "Type variable '{:?}' is defined in the type parameters for signature '{}', - but is not used in the parameters or result", - tp.name, - name - ); - } - - // check that no duplicates exist in the type_parameters - let duplicates: Vec<_> = type_parameters - .iter() - .map(|p| &p.name.0) - .duplicates() - .collect(); - if !duplicates.is_empty() { - anyhow::bail!( - "Duplicate type parameters: {} in signature for '{name}'", - duplicates - .iter() - .format_with(",", |elt, f| f(&format_args!("'{elt}'"))) - ); - }; - + check_signature(&name, ¶meters, &type_parameters, &result)?; Ok(Self { name, parameters, @@ -160,7 +185,7 @@ impl Signature { } else { assert!( num_args == self.parameters.names().len(), - "Expected operator '{:?}' to have exactly {:?} 
arguments, but was {:?}",
+            "Expected operator '{:?}' to have exactly {:?} arguments, but was {:?} ({self:?})",
             self.name,
             self.parameters.names().len(),
             num_args,
@@ -169,22 +194,6 @@
     }
 }
 
-/// Verifies the type variable is defined in the type parameters.
-fn verify_is_defined(
-    type_var: &TypeVariable,
-    type_parameters: &[TypeParameter],
-    signature: &str,
-) -> anyhow::Result<()> {
-    if !type_parameters.iter().any(|tp| &tp.name == type_var) {
-        anyhow::bail!(
-            "Type variable '{:?}' is not defined in the type parameters for signature: '{:?}'",
-            type_var,
-            signature
-        )
-    }
-    Ok(())
-}
-
 #[cfg(test)]
 mod tests {
diff --git a/crates/sparrow-transforms/Cargo.toml b/crates/sparrow-transforms/Cargo.toml
new file mode 100644
index 000000000..74cf6a7ca
--- /dev/null
+++ b/crates/sparrow-transforms/Cargo.toml
@@ -0,0 +1,28 @@
+[package]
+name = "sparrow-transforms"
+version.workspace = true
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+publish = false
+description = """
+Implementation of transforms and pipeline for executing them.
+"""
+
+[dependencies]
+arrow-array.workspace = true
+arrow-schema.workspace = true
+derive_more.workspace = true
+error-stack.workspace = true
+itertools.workspace = true
+parking_lot.workspace = true
+sparrow-arrow = { path = "../sparrow-arrow" }
+sparrow-expressions = { path = "../sparrow-expressions" }
+sparrow-physical = { path = "../sparrow-physical" }
+sparrow-scheduler = { path = "../sparrow-scheduler" }
+tracing.workspace = true
+
+[dev-dependencies]
+
+[lib]
+doctest = false
diff --git a/crates/sparrow-transforms/src/lib.rs b/crates/sparrow-transforms/src/lib.rs
new file mode 100644
index 000000000..439fdbb7a
--- /dev/null
+++ b/crates/sparrow-transforms/src/lib.rs
@@ -0,0 +1,22 @@
+#![warn(
+    rust_2018_idioms,
+    nonstandard_style,
+    future_incompatible,
+    clippy::mod_module_files,
+    clippy::print_stdout,
+    clippy::print_stderr,
+    clippy::undocumented_unsafe_blocks
+)]
+
+//! Pipeline for executing one or more transforms.
+//!
+//! Transforms are simpler than pipelines -- they apply processing logic to an
+//! input batch to produce an output batch. Only the last transform in a pipeline
+//! may affect the keys associated with rows -- after that, a repartition pipeline
+//! must be executed to move data to the appropriate partitions.
+
+mod project;
+mod transform;
+mod transform_pipeline;
+
+pub use transform_pipeline::*;
diff --git a/crates/sparrow-transforms/src/project.rs b/crates/sparrow-transforms/src/project.rs
new file mode 100644
index 000000000..011ce84e4
--- /dev/null
+++ b/crates/sparrow-transforms/src/project.rs
@@ -0,0 +1,55 @@
+use arrow_array::RecordBatch;
+use arrow_schema::SchemaRef;
+use error_stack::{IntoReport, ResultExt};
+use sparrow_arrow::Batch;
+
+use sparrow_expressions::ExpressionExecutor;
+use sparrow_physical::Exprs;
+
+use crate::transform::{Error, Transform};
+
+/// Transform for projection.
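+///
+/// A projection evaluates the step's expressions against each input batch and
+/// assembles the chosen output columns into a new batch. Rough shape of its use
+/// (illustrative; `input_schema`, `exprs`, and `output_schema` come from the
+/// physical plan):
+///
+/// ```ignore
+/// let project = Project::try_new(&input_schema, &exprs, output_schema)?;
+/// let output = project.apply(input_batch)?;
+/// ```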
+pub struct Project {
+    evaluators: ExpressionExecutor,
+    outputs: Vec<usize>,
+    schema: SchemaRef,
+}
+
+impl Project {
+    pub fn try_new(
+        input_schema: &SchemaRef,
+        exprs: &Exprs,
+        schema: SchemaRef,
+    ) -> error_stack::Result<Self, Error> {
+        let evaluators = ExpressionExecutor::try_new(input_schema.as_ref(), exprs.exprs.as_vec())
+            .change_context_lazy(|| Error::CreateTransform("project"))?;
+        Ok(Self {
+            evaluators,
+            outputs: exprs.outputs.iter().map(|n| (*n).into()).collect(),
+            schema,
+        })
+    }
+}
+
+impl Transform for Project {
+    fn apply(&self, batch: Batch) -> error_stack::Result<Batch, Error> {
+        assert!(!batch.is_empty());
+
+        let error = || Error::ExecuteTransform("project");
+        let columns = self.evaluators.execute(&batch).change_context_lazy(error)?;
+        let columns = self
+            .outputs
+            .iter()
+            .map(|index| columns[*index].clone())
+            .collect();
+
+        let result = RecordBatch::try_new(self.schema.clone(), columns)
+            .into_report()
+            .change_context_lazy(error)?;
+        Ok(batch.with_projection(result))
+    }
+
+    fn name(&self) -> &'static str {
+        std::any::type_name::<Self>()
+    }
+}
diff --git a/crates/sparrow-transforms/src/transform.rs b/crates/sparrow-transforms/src/transform.rs
new file mode 100644
index 000000000..7db2a6a68
--- /dev/null
+++ b/crates/sparrow-transforms/src/transform.rs
@@ -0,0 +1,24 @@
+use sparrow_arrow::Batch;
+
+#[derive(derive_more::Display, Debug)]
+pub enum Error {
+    #[display(fmt = "failed to create {_0} transform")]
+    CreateTransform(&'static str),
+    #[display(fmt = "failed to execute {_0} transform")]
+    ExecuteTransform(&'static str),
+}
+
+impl error_stack::Context for Error {}
+
+/// Trait implementing a transform, executed as part of a [TransformPipeline].
+pub(crate) trait Transform: Send + Sync {
+    /// Name of the transform.
+    ///
+    /// This will default to the name of the struct implementing the transform.
+    fn name(&self) -> &'static str {
+        std::any::type_name::<Self>()
+    }
+
+    /// Apply the transform to the given input batch.
+    fn apply(&self, batch: Batch) -> error_stack::Result<Batch, Error>;
+}
diff --git a/crates/sparrow-transforms/src/transform_pipeline.rs b/crates/sparrow-transforms/src/transform_pipeline.rs
new file mode 100644
index 000000000..7f4be085c
--- /dev/null
+++ b/crates/sparrow-transforms/src/transform_pipeline.rs
@@ -0,0 +1,264 @@
+use std::collections::VecDeque;
+use std::sync::atomic::{AtomicBool, Ordering};
+
+use error_stack::ResultExt;
+use itertools::Itertools;
+use parking_lot::Mutex;
+use sparrow_arrow::Batch;
+use sparrow_physical::{StepId, StepKind};
+use sparrow_scheduler::{
+    Partition, Partitioned, Pipeline, PipelineError, PipelineInput, Scheduler, TaskRef,
+};
+
+use crate::transform::Transform;
+
+/// Runs a linear sequence of transforms as a pipeline.
+pub struct TransformPipeline {
+    /// The state for each partition.
+    partitions: Partitioned<TransformPartition>,
+    transforms: Vec<Box<dyn Transform>>,
+    /// Sink for the down-stream computation.
+    sink: PipelineInput,
+}
+
+impl std::fmt::Debug for TransformPipeline {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("TransformPipeline")
+            .field(
+                "transforms",
+                &self.transforms.iter().map(|t| t.name()).format(","),
+            )
+            .finish()
+    }
+}
+
+struct TransformPartition {
+    /// Whether this partition is closed.
+    is_closed: AtomicBool,
+    /// Inputs for this partition.
+    ///
+    /// TODO: This could use a thread-safe queue to avoid locking.
+    inputs: Mutex<VecDeque<Batch>>,
+    /// Task for this partition.
+    task: TaskRef,
+}
+
+impl TransformPartition {
+    /// Close the input. Returns true if the input buffer is empty.
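+    ///
+    /// The `Release` store pairs with the `Acquire` load in `is_input_closed`,
+    /// so a task that observes the closed flag also observes any batches that
+    /// were queued before the close.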
+ fn close_input(&self) -> bool { + self.is_closed.store(true, Ordering::Release); + self.inputs.lock().is_empty() + } + + fn is_input_closed(&self) -> bool { + self.is_closed.load(Ordering::Acquire) + } + + fn is_input_empty(&self) -> bool { + self.inputs.lock().is_empty() + } + + fn add_input(&self, batch: Batch) { + self.inputs.lock().push_back(batch); + } + + fn pop_input(&self) -> Option { + self.inputs.lock().pop_front() + } +} + +#[derive(derive_more::Display, Debug)] +pub enum Error { + #[display(fmt = "transforms should accept exactly 1 input, but length for '{kind}' was {len}")] + TooManyInputs { kind: &'static str, len: usize }, + #[display(fmt = "invalid transform: expected input {expected} but was {actual}")] + UnexpectedInput { expected: StepId, actual: StepId }, + #[display(fmt = "step '{kind}' is not supported as a transform")] + UnsupportedStepKind { kind: &'static str }, + #[display(fmt = "failed to create transform for step '{kind}'")] + CreatingTransform { kind: &'static str }, +} + +impl error_stack::Context for Error {} + +impl TransformPipeline { + pub fn try_new<'a>( + input_step: &sparrow_physical::Step, + steps: impl Iterator + ExactSizeIterator, + sink: PipelineInput, + ) -> error_stack::Result { + let mut input_step = input_step; + let mut transforms = Vec::with_capacity(steps.len()); + for step in steps { + error_stack::ensure!( + step.inputs.len() == 1, + Error::TooManyInputs { + kind: (&step.kind).into(), + len: step.inputs.len() + } + ); + error_stack::ensure!( + step.inputs[0] == input_step.id, + Error::UnexpectedInput { + expected: input_step.id, + actual: step.inputs[0] + } + ); + + let transform: Box = match &step.kind { + StepKind::Project { exprs } => Box::new( + crate::project::Project::try_new( + &input_step.schema, + exprs, + step.schema.clone(), + ) + .change_context_lazy(|| Error::CreatingTransform { + kind: (&step.kind).into(), + })?, + ), + unsupported => { + error_stack::bail!(Error::UnsupportedStepKind { + kind: unsupported.into() + }) + } + }; + transforms.push(transform); + input_step = step; + } + Ok(Self { + partitions: Partitioned::default(), + transforms, + sink, + }) + } +} + +impl Pipeline for TransformPipeline { + fn initialize(&mut self, tasks: Partitioned) { + self.partitions = tasks + .into_iter() + .map(|task| TransformPartition { + is_closed: AtomicBool::new(false), + inputs: Mutex::new(VecDeque::new()), + task, + }) + .collect(); + } + + fn add_input( + &self, + input_partition: Partition, + input: usize, + batch: Batch, + scheduler: &mut dyn Scheduler, + ) -> error_stack::Result<(), PipelineError> { + error_stack::ensure!( + input == 0, + PipelineError::InvalidInput { + input, + input_len: 1 + } + ); + let partition = &self.partitions[input_partition]; + error_stack::ensure!( + !partition.is_input_closed(), + PipelineError::InputClosed { + input, + input_partition + } + ); + + partition.add_input(batch); + scheduler.schedule(partition.task.clone()); + Ok(()) + } + + fn close_input( + &self, + input_partition: Partition, + input: usize, + scheduler: &mut dyn Scheduler, + ) -> error_stack::Result<(), PipelineError> { + error_stack::ensure!( + input == 0, + PipelineError::InvalidInput { + input, + input_len: 1 + } + ); + let partition = &self.partitions[input_partition]; + error_stack::ensure!( + !partition.is_input_closed(), + PipelineError::InputClosed { + input, + input_partition + } + ); + + // Don't close the sink here. 
We may be currently executing a `do_work` + // loop, in which case we need to allow it to output to the sink before + // we close it. + partition.close_input(); + scheduler.schedule(partition.task.clone()); + + Ok(()) + } + + fn do_work( + &self, + input_partition: Partition, + scheduler: &mut dyn Scheduler, + ) -> error_stack::Result<(), PipelineError> { + let partition = &self.partitions[input_partition]; + + let Some(batch) = partition.pop_input() else { + error_stack::ensure!( + partition.is_input_closed(), + PipelineError::illegal_state("scheduled without work") + ); + return self.sink.close_input(input_partition, scheduler); + }; + + tracing::trace!( + "Performing work for partition {input_partition} on {} rows", + batch.num_rows() + ); + + // If the batch is non empty, process it. + if !batch.is_empty() { + let mut batch = batch; + for transform in self.transforms.iter() { + batch = transform + .apply(batch) + .change_context(PipelineError::Execution)?; + + // Exit the sequence of transforms early if the batch is empty. + // Transforms don't add rows. + if batch.is_empty() { + break; + } + } + + // If the result is non-empty, output it. + if !batch.is_empty() { + self.sink + .add_input(input_partition, batch, scheduler) + .change_context(PipelineError::Execution)?; + } + } + + // If the input is closed and empty, then we should close the sink. + if partition.is_input_closed() && partition.is_input_empty() { + self.sink + .close_input(input_partition, scheduler) + .change_context(PipelineError::Execution)?; + } + + // Note: We don't re-schedule the transform if there is input. + // This should be handled by the fact that we scheduled the transform + // when we added the batch, which should trigger the "scheduled during + // execution" -> "re-schedule" logic (see ScheduleCount). + + Ok(()) + } +} diff --git a/deny.toml b/deny.toml index f1967a556..656102919 100644 --- a/deny.toml +++ b/deny.toml @@ -59,9 +59,6 @@ ignore = [ # Not used in runtime code. # https://github.com/bheisler/criterion.rs/issues/629 "RUSTSEC-2021-0145", - - # Advisory: https://rustsec.org/advisories/RUSTSEC-2023-0034 - "RUSTSEC-2023-0034", ] # Threshold for security vulnerabilities, any vulnerability with a CVSS score # lower than the range specified will be ignored. 
Note that ignored advisories diff --git a/docs-src/modules/ROOT/pages/loading-data.adoc b/docs-src/modules/ROOT/pages/loading-data.adoc index 3a12e2124..f22a7aede 100644 --- a/docs-src/modules/ROOT/pages/loading-data.adoc +++ b/docs-src/modules/ROOT/pages/loading-data.adoc @@ -29,9 +29,7 @@ Python:: [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.load( table_name = "Purchase", @@ -88,9 +86,7 @@ Python:: [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.load( table_name = "Purchase", @@ -138,9 +134,7 @@ Python:: [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.load( table_name = "Purchase", diff --git a/docs-src/modules/developing/pages/materializations.adoc b/docs-src/modules/developing/pages/materializations.adoc index 9cf04b4e7..c37b9001c 100644 --- a/docs-src/modules/developing/pages/materializations.adoc +++ b/docs-src/modules/developing/pages/materializations.adoc @@ -118,9 +118,7 @@ Python:: [source,python] ---- from kaskada import materialization -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() materialization.list_materializations() ---- @@ -144,9 +142,6 @@ Python:: [source,python] ---- from kaskada import materialization -from kaskada.api.session import LocalBuilder - -session = LocalBuilder().build() materialization.get_materialization("PurchaseStats") ---- @@ -176,9 +171,7 @@ Python:: [source,python] ---- from kaskada import materialization -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() materialization.delete_materialization("PurchaseStats") ---- diff --git a/docs-src/modules/developing/pages/queries.adoc b/docs-src/modules/developing/pages/queries.adoc index 21cd81a80..e7bdee8f4 100644 --- a/docs-src/modules/developing/pages/queries.adoc +++ b/docs-src/modules/developing/pages/queries.adoc @@ -14,12 +14,12 @@ To learn more about Fenl: == Query syntax quickstart -Kaskada's query language builds on the lessons of 50+ years of query language design to provide a declarative, composable, easy-to-read, and type-safe way of describing computations related to time. +Kaskada's query language builds on the lessons of 50+ years of query language design to provide a declarative, composable, easy-to-read, and type-safe way of describing computations related to time. The following is a quick overview of the query language's main features and syntax. === Viewing and filtering the contents of a table -Kaskada queries are built by composing simple expressions. +Kaskada queries are built by composing simple expressions. Every expression returns a timeline. [source,Fenl] @@ -86,7 +86,7 @@ Pipe syntax allows multiple operations to be chained together. Write your operat ---- { largest_spend_over_2_purchases: purchase.amount - | when(Purchase.category == "food") + | when(Purchase.category == "food") | sum(window=sliding(2, Purchase.category == "food")) # Inner aggregation | max() # Outer aggregation } @@ -135,7 +135,7 @@ let purchases_yesterday = in { purchases_in_last_day: purchases_now - purchases_yesterday } ---- -In this example we take the timeline produced by `purchases_now` and move it forward in time by one day using the `xref:fenl:catalog.adoc#shift-by[shift_by()]` function. 
+In this example we take the timeline produced by `purchases_now` and move it forward in time by one day using the `xref:fenl:catalog.adoc#shift-by[shift_by()]` function. We then subtract the shifted value from the original, unshifted value === Simple, composable syntax @@ -149,7 +149,7 @@ let cadence = hourly() # Anything can be named and re-used let hourly_big_purchases = Purchase | when(Purchase.amount > 10) -# Filter anywhere +# Filter anywhere | count(window=since(cadence)) # Aggregate anything | when(cadence) @@ -243,10 +243,10 @@ from kaskada.api.session import LocalBuilder session = LocalBuilder().build() -query = """{ - time: Purchase.purchase_time, - entity: Purchase.customer_id, - max_amount: last(Purchase.amount) | max(), +query = """{ + time: Purchase.purchase_time, + entity: Purchase.customer_id, + max_amount: last(Purchase.amount) | max(), min_amount: Purchase.amount | min() }""" @@ -272,9 +272,7 @@ views stored in the system. `"all-results"` _(default)_, or `"final-results"` which returns only the final values for each entity. * *response_as*: Determines how the response is returned. Either -`"parquet"` _(default)_ or `"redis-bulk"`. -** If `"redis-bulk"`, result_behavior is assumed to be -`"final-results"`. +`"parquet"` _(default)_ or `"csv"`. * *data_token_id*: Enables repeatable queries. Queries performed against the same data token always run on the same input data. * *limits*: Configures limits on the output set. @@ -317,8 +315,7 @@ available: `all-results` _(default)_, or `final-results` which returns only the final values for each entity. * *--output*: Output format for the query results. One of `df` dataframe -_(default)_, `json`, `parquet` or `redis-bulk`. -** If `redis-bulk`, --result-behavior is assumed to be `final-results`. +_(default)_, `json`, or `parquet`. * *--data-token*: Enables repeatable queries. Queries performed against the same data token always run on the same input data. * *--preview-rows*: Produces a preview of the data with at least this @@ -515,7 +512,7 @@ transactions Returns a dataframe of 71599 rows, instead of the full dataset of 100000 rows. -[NOTE] +[NOTE] ==== It may seem odd that many thousands of rows were returned when only 50 were requested. This happens because query operates on batches @@ -536,12 +533,12 @@ To query Kaskada using the CLI, the query string should be provided on `STDIN`. An easy way to define a query is to create a text file containing the query. [source,Fenl] -.query.fenl +.query.fenl ---- -{ - time: Purchase.purchase_time, - entity: Purchase.customer_id, - max_amount: last(Purchase.amount) | max(), +{ + time: Purchase.purchase_time, + entity: Purchase.customer_id, + max_amount: last(Purchase.amount) | max(), min_amount: Purchase.amount | min() } ---- @@ -567,11 +564,11 @@ By default, query results are written to a Parquet file: the locations of these } ---- -The resulting files are stored in the JSON path `outputTo.objectStore.outputPaths.paths` as an array of paths. +The resulting files are stored in the JSON path `outputTo.objectStore.outputPaths.paths` as an array of paths. [TIP] ==== -To slice and/or filter JSON output we can use https://stedolan.github.io/jq/[jq]. +To slice and/or filter JSON output we can use https://stedolan.github.io/jq/[jq]. 
==== [source,bash] diff --git a/docs-src/modules/developing/pages/tables.adoc b/docs-src/modules/developing/pages/tables.adoc index c5f29f3ff..3c7798bc8 100644 --- a/docs-src/modules/developing/pages/tables.adoc +++ b/docs-src/modules/developing/pages/tables.adoc @@ -43,9 +43,7 @@ Python:: [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.create_table( # The table's name @@ -91,9 +89,7 @@ Python:: [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.list_tables() ---- @@ -117,9 +113,7 @@ Python:: [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.get_table("Purchase") ---- @@ -148,9 +142,7 @@ Python:: [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.delete_table("Purchase") ---- @@ -182,9 +174,7 @@ Python:: [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.delete_table("Purchase", force = True) ---- diff --git a/docs-src/modules/developing/pages/views.adoc b/docs-src/modules/developing/pages/views.adoc index 91209af08..4ed9fd500 100644 --- a/docs-src/modules/developing/pages/views.adoc +++ b/docs-src/modules/developing/pages/views.adoc @@ -93,9 +93,7 @@ without re-typing the expression: [source,python] ---- from kaskada import view -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() view.create_view( view_name = "PurchaseStats", @@ -117,9 +115,7 @@ Python:: [source,python] ---- from kaskada import view -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() view.list_views() ---- @@ -143,9 +139,7 @@ Python:: [source,python] ---- from kaskada import view -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() view.get_view("PurchaseStats") ---- @@ -174,9 +168,7 @@ Python:: [source,python] ---- from kaskada import view -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() view.delete_view("PurchaseStats") ---- @@ -203,9 +195,7 @@ Python:: [source,python] ---- from kaskada import view -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() view.delete_view("PurchaseStats", force = True) ---- diff --git a/docs-src/modules/getting-started/pages/hello-world-jupyter.adoc b/docs-src/modules/getting-started/pages/hello-world-jupyter.adoc index ecf633653..9d2dd0022 100644 --- a/docs-src/modules/getting-started/pages/hello-world-jupyter.adoc +++ b/docs-src/modules/getting-started/pages/hello-world-jupyter.adoc @@ -114,6 +114,8 @@ session = LocalBuilder().keep_alive(False).build() ==== +Once we have a Kaskada `session` created we can start now interacting with the Kaskada components. + Let's now create a small table and write a simple query to see that everything is working correctly with our setup. === Enable the Kaskada magic command @@ -165,9 +167,7 @@ When creating a table, you must tell Kaskada which columns contain the time and [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.create_table( # The table's name @@ -241,9 +241,7 @@ load the contents of a Parquet file into the table. 
[source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() # A sample Parquet file provided by Kaskada for testing # Available at https://drive.google.com/uc?export=download&id=1SLdIw9uc0RGHY-eKzS30UBhN0NJtslkk @@ -379,9 +377,7 @@ When you're done with this tutorial, you can delete the table you created in ord [source,python] ---- from kaskada import table -from kaskada.api.session import LocalBuilder -session = LocalBuilder().build() table.delete_table( # The table's name diff --git a/docs-src/modules/installing/pages/local.adoc b/docs-src/modules/installing/pages/local.adoc index 6d1d2c329..a05824cc0 100644 --- a/docs-src/modules/installing/pages/local.adoc +++ b/docs-src/modules/installing/pages/local.adoc @@ -141,8 +141,7 @@ optional arguments: repeatable queries. --debug DEBUG Shows debugging information --output OUTPUT Output format for the query results. One of "df" - (default), "json", "parquet" or "redis-bulk". "redis- - bulk" implies --result-behavior "final-results" + (default), "json", or "parquet". --preview-rows PREVIEW_ROWS Produces a preview of the data with at least this many rows. diff --git a/examples/Customer_Retention_(OSS).ipynb b/examples/Customer_Retention_(OSS).ipynb index 6dce401fc..a66085354 100644 --- a/examples/Customer_Retention_(OSS).ipynb +++ b/examples/Customer_Retention_(OSS).ipynb @@ -467,6 +467,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "id": "9b400634-fa26-4202-9fc7-951f2d431532", "metadata": { @@ -481,7 +482,7 @@ "- Kaskada connects directly the event-based data available in production\n", "- Data scientists define the predictor features used to power training sets\n", "- Data and ML Engineers call Kaskada to compute the **same** features at the time of now() in production\n", - "- Kaskada provides production grade targets such as Redis for feature and model serving" + "- Kaskada provides production grade targets such as Pulsar for feature and model serving" ] }, { diff --git a/examples/slackbot/Notebook.ipynb b/examples/slackbot/Notebook.ipynb new file mode 100644 index 000000000..0708159eb --- /dev/null +++ b/examples/slackbot/Notebook.ipynb @@ -0,0 +1,316 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "235d821b-1ff8-4ef6-8f0b-559c95254479", + "metadata": {}, + "source": [ + "# Slackbot Example\n", + "\n", + "SlackBot keeps you in the loop without disturbing your focus. Its personalized, intelligent AI continuously monitors your Slack workspace, alerting you to important conversations and freeing you to concentrate on what’s most important.\n", + "\n", + "SlackBot reads the full history of your (public) Slack workspace and trains a Generative AI model to predict when you need to engage with a conversation. This training process gives the AI a deep understanding of your interests, expertise, and relationships. Using this understanding, SlackBot watches conversations in real-time and notifies you when an important conversation is happening without you. 
With SlackBot200 you can focus on getting things done without worrying about missing out.\n", + "\n", + "In this notebook, you’ll see you how to build and deploy SlackBot in 15 minutes using only OpenAI’s API’s and open-source Python libraries - Data Science PhD not required.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "70440303", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install openai kaskada" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "61ea2e95-6d9d-4068-ab98-8cf94bc4d9d0", + "metadata": {}, + "outputs": [], + "source": [ + "from datetime import datetime, timedelta\n", + "from slack_sdk.socket_mode import SocketModeClient, SocketModeResponse\n", + "import sparrow_pi as kt\n", + "import openai\n", + "import getpass\n", + "import pyarrow\n", + "\n", + "# Initialize Kaskada with a local execution context.\n", + "kt.init_session()\n", + "\n", + "# Initialize OpenAI\n", + "openai.api_key = getpass.getpass('OpenAI: API Key')\n", + "\n", + "# Initialize Slack\n", + "slack = SocketModeClient(\n", + " app_token=getpass.getpass('Slack: App Token'),\n", + " web_client=getpass.getpass('Slack: Bot Token'),\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "9b8a144d-8d79-4943-b99b-d3470ee96283", + "metadata": {}, + "source": [ + "## Prompt Engineering" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7e6fedb9", + "metadata": {}, + "outputs": [], + "source": [ + "def build_conversation(messages):\n", + " message_time = messages.col(\"ts\")\n", + " last_message_time = message_time.lag(1) # !!!\n", + " is_new_conversation = message_time.seconds_since(last_message_time) > 10 * 60\n", + "\n", + " return messages \\\n", + " .select(\"user\", \"ts\", \"text\", \"reactions\") \\\n", + " .collect(window=kt.windows.Since(is_new_conversation), max=100)" + ] + }, + { + "cell_type": "markdown", + "id": "9247233a", + "metadata": {}, + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fdb2d959-d371-4026-9f8d-4ab26cfbf317", + "metadata": {}, + "outputs": [], + "source": [ + "def build_examples(messages):\n", + " duration = datetime.timedelta(minutes=5)\n", + "\n", + " coverstation = build_conversation(messages)\n", + " shifted_coversation = coverstation.shift_by(duration) # !!!\n", + "\n", + " reaction_users = coverstation.col(\"reactions\").col(\"name\").collect(kt.windows.Trailing(duration)).flatten() # !!!\n", + " participating_users = coverstation.col(\"user\").collect(kt.windows.Trailing(duration)) # !!!\n", + " engaged_users = kt.union(reaction_users, participating_users) # !!!\n", + "\n", + " return kt.record({ \"prompt\": shifted_coversation, \"completion\": engaged_users}) \\\n", + " .filter(shifted_coversation.is_not_null())" + ] + }, + { + "cell_type": "markdown", + "id": "0035f558-23bd-4b4d-95a0-ed5e8fece673", + "metadata": {}, + "source": [ + "## Fine-tune the model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "af7d2a45-eb89-47ce-b471-a39ad8c7bbc7", + "metadata": {}, + "outputs": [], + "source": [ + "import pandas\n", + "import sparrow_pi.sources as sources\n", + "\n", + "messages = kt.sources.Parquet(\"./messages.parquet\", time = \"ts\", entity = \"channel\")\n", + "messages = messages.with_key(kt.record({ # !!!\n", + " \"channel\": messages.col(\"channel\"),\n", + " \"thread\": messages.col(\"thread_ts\"),\n", + " }))\n", + "examples = build_examples(messages)\n", + "\n", + "examples_df = examples.run().to_pandas()" + ] + }, + { + 
"cell_type": "code", + "execution_count": null, + "id": "fa93a8db", + "metadata": {}, + "outputs": [], + "source": [ + "from sklearn import preprocessing\n", + "\n", + "le = preprocessing.LabelEncoder()\n", + "le.fit(examples_df.completion.explode())\n", + "\n", + "# Format for the OpenAI API\n", + "def format_prompt(prompt):\n", + " return \"start -> \" + \"\\n\\n\".join([f' {msg[\"user\"]} --> {msg[\"text\"]} ' for msg in prompt]) + \"\\n\\n###\\n\\n\"\n", + "examples_df.prompt = examples_df.prompt.apply(format_prompt)\n", + "\n", + "def format_completion(completion):\n", + " return \" \" + (\" \".join([le.transform(u) for u in completion]) if len(completion) > 0 else \"nil\") + \" end\"\n", + "examples_df.completion = examples_df.completion.apply(format_completion)\n", + "\n", + "# Write examples to file\n", + "examples_df.to_json(\"examples.jsonl\", orient='records', lines=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "83914ada-d108-422b-b4c0-7a0d9576d031", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "from types import SimpleNamespace\n", + "from openai import cli\n", + "\n", + "# verifiy data format, split for training & validation\n", + "args = SimpleNamespace(file='./examples.jsonl', quiet=True)\n", + "cli.FineTune.prepare_data(args)\n", + "training_id = cli.FineTune._get_or_upload('./examples_prepared_train.jsonl', True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9a60b77c", + "metadata": {}, + "outputs": [], + "source": [ + "import openai\n", + "\n", + "resp = openai.FineTune.create(\n", + " training_file = training_id,\n", + " model = \"davinci\",\n", + " n_epochs = 2,\n", + " learning_rate_multiplier = 0.02,\n", + " suffix = \"coversation_users\"\n", + ")\n", + "print(f'Fine-tuning model with job ID: \"{resp[\"id\"]}\"')" + ] + }, + { + "cell_type": "markdown", + "id": "b3e29109-cc00-4bf5-ba23-069e8db1f179", + "metadata": { + "jp-MarkdownHeadingCollapsed": true, + "tags": [] + }, + "source": [ + "## Notify users of conversations they need to know about" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "540afff7-4ebc-427f-8205-1ed145e0c59a", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import json, math\n", + "\n", + "min_prob_for_response = 0.50\n", + "\n", + "# Receive Slack messages in real-time\n", + "live_messages = kt.sources.ArrowSource(entity_column=\"channel\", time_column=\"ts\")\n", + "\n", + "# Receive messages from Slack\n", + "def handle_message(client, req):\n", + " # Acknowledge the message back to Slack\n", + " client.send_socket_mode_response(SocketModeResponse(envelope_id=req.envelope_id))\n", + " \n", + " if req.type == \"events_api\" and \"event\" in req.payload:\n", + " e = req.payload[\"event\"]\n", + " if \"previous_message\" in e or e[\"type\"] == \"reaction_added\":\n", + " return\n", + " # send message events to Kaskada\n", + " live_messages.add_event(pyarrow.json.read_json(e))\n", + "\n", + "slack.socket_mode_request_listeners.append(handle_message)\n", + "slack.connect()\n", + "\n", + "# Handle messages in realtime\n", + "# A \"conversation\" is a list of messages\n", + "for conversation in build_conversation(live_messages).start().to_generator():\n", + " if len(conversation) == 0:\n", + " continue\n", + " \n", + " # Ask the model who should be notified\n", + " res = openai.Completion.create(\n", + " model=\"davinci:ft-personal:coversation-users-full-kaskada-2023-08-05-14-25-30\", \n", + " 
+    "        prompt=format_prompt(conversation),\n",
+    "        logprobs=5,\n",
+    "        max_tokens=1,\n",
+    "        stop=\" end\",\n",
+    "        temperature=0,\n",
+    "    )\n",
+    "\n",
+    "    users = []\n",
+    "    logprobs = res[\"choices\"][0][\"logprobs\"][\"top_logprobs\"][0]\n",
+    "    for user in logprobs:\n",
+    "        if math.exp(logprobs[user]) > min_prob_for_response:\n",
+    "            user = user.strip()\n",
+    "            # if users include `nil`, stop processing\n",
+    "            if user == \"nil\":\n",
+    "                users = []\n",
+    "                break\n",
+    "            users.append(user)\n",
+    "\n",
+    "    # alert on most recent message in conversation\n",
+    "    msg = conversation.pop()\n",
+    "\n",
+    "    # Send notification to users\n",
+    "    for user in users:\n",
+    "        user_id = le.inverse_transform([int(user)])[0]\n",
+    "\n",
+    "        # get user channel for slackbot\n",
+    "        app = slack.web_client.users_conversations(\n",
+    "            types=\"im\",\n",
+    "            user=user_id,\n",
+    "        )\n",
+    "\n",
+    "        # confirm user has slackbot installed\n",
+    "        if len(app[\"channels\"]) == 0:\n",
+    "            continue\n",
+    "\n",
+    "        link = slack.web_client.chat_getPermalink(\n",
+    "            channel=msg[\"channel\"],\n",
+    "            message_ts=msg[\"ts\"],\n",
+    "        )[\"permalink\"]\n",
+    "\n",
+    "        slack.web_client.chat_postMessage(\n",
+    "            channel=app[\"channels\"][0][\"id\"],\n",
+    "            text=f'You may be interested in this conversation: <{link}|{msg[\"text\"]}>'\n",
+    "        )"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.9"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/examples/slackbot/OpenAI.ipynb b/examples/slackbot/OpenAI.ipynb
new file mode 100644
index 000000000..fb897810e
--- /dev/null
+++ b/examples/slackbot/OpenAI.ipynb
@@ -0,0 +1,326 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "49e90573",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "!pip install -q openai"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "51b20a52",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import getpass\n",
+    "import openai\n",
+    "\n",
+    "openai.api_key = getpass.getpass('OpenAI: API Key')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "152523e1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from typing import Set\n",
+    "from abc import ABC, abstractmethod\n",
+    "\n",
+    "class SlackBotMessage(ABC):\n",
+    "    \"\"\"Defines a base slack bot message. This class is intended to be extended rather than used directly.\n",
+    "    \"\"\"\n",
+    "    @abstractmethod\n",
+    "    def to_request(self):\n",
+    "        pass\n",
+    "\n",
+    "class SystemContent(SlackBotMessage):\n",
+    "    \"\"\"SystemContent messages are prefixed to a conversation and provide context to the LLM.\n",
+    "    \"\"\"\n",
+    "    def __init__(self, content: str):\n",
+    "        self.content = content\n",
+    "\n",
+    "    def to_request(self):\n",
+    "        return {\"role\": \"system\", \"content\": self.content}\n",
+    "\n",
+    "class QueryContent(SlackBotMessage):\n",
+    "    \"\"\"QueryContent is a message that gives the chat bot the user's topic and asks who to reach out to.\n",
+    "    \"\"\"\n",
+    "    def __init__(self, content: str):\n",
+    "        self.content = content\n",
+    "\n",
+    "    def to_request(self):\n",
+    "        return {\"role\": \"user\", \"content\": f\"Who should I reach out to about: {self.content}\"}\n",
+    "\n",
+    "class SlackMessage(SlackBotMessage):\n",
+    "    \"\"\"SlackMessage wraps a message from Slack that provides additional context for the query.\n",
+    "    \"\"\"\n",
+    "    def __init__(self, username: str, content: str):\n",
+    "        self.username = username\n",
+    "        self.content = content\n",
+    "\n",
+    "    def to_request(self):\n",
+    "        return {\"role\": \"system\", \"content\": f\"{self.username}: {self.content}\"}\n",
+    "\n",
+    "class PostProcessor:\n",
+    "    \"\"\"PostProcessor parses the response from OpenAI.\n",
+    "    \"\"\"\n",
+    "    def __init__(self, users: Set[str]):\n",
+    "        self.users = users\n",
+    "\n",
+    "    def get_users_from_message(self, message: str) -> Set[str]:\n",
+    "        print(message)\n",
+    "        poc_users = set()\n",
+    "        for user in self.users:\n",
+    "            if user in message:\n",
+    "                poc_users.add(user)\n",
+    "        return poc_users\n",
+    "\n",
+    "class SlackBot:\n",
+    "    \"\"\"SlackBot is the entry-point to the example of Kaskada + OpenAI + LLMs.\n",
+    "    \"\"\"\n",
+    "    def __init__(self, model_name: str = \"gpt-3.5-turbo\", max_tokens: int = 25):\n",
+    "        self.model_name = model_name\n",
+    "        self.max_tokens = max_tokens\n",
+    "        self.users = set()\n",
+    "        self.intro_message = SystemContent(\"You are a helpful assistant designed to suggest the names of people who would best be points of contacts for a specific topic based on messages.\")\n",
+    "        self.messages = []\n",
+    "        self.post_processor = PostProcessor(self.users)\n",
+    "\n",
+    "    def get_subset_users_message(self):\n",
+    "        return SystemContent(f\"Only respond as a JSON object with any subset of these usernames who would be very interested, or return an empty set if no one would be interested: {self.users}\".replace(\"[\", \"\").replace(\"]\", \"\")).to_request()\n",
+    "\n",
+    "    def get_format_message(self):\n",
+    "        return SystemContent(\"Messages are formatted as username: topic of message\").to_request()\n",
+    "\n",
+    "    def add_message(self, username: str, content: str):\n",
+    "        message = SlackMessage(username, content)\n",
+    "        self.users.add(username)\n",
+    "        self.messages.append(message.to_request())\n",
+    "\n",
+    "    def __create_conversation(self, query: QueryContent):\n",
+    "        return [self.intro_message.to_request(),\\\n",
+    "                self.get_subset_users_message(),\\\n",
+    "                self.get_format_message()] + self.messages + [query.to_request()]\n",
+    "\n",
+    "    def query(self, query: str) -> Set[str]:\n",
+    "        messages = self.__create_conversation(QueryContent(query))\n",
+    "        conversation = openai.ChatCompletion.create(\n",
+    "            model=self.model_name,\n",
+    "            messages=messages,\n",
+    "            max_tokens=self.max_tokens,\n",
+    "            temperature=0\n",
+    "        )\n",
+    "        response = conversation.choices[0].message.content\n",
+    "        return self.post_processor.get_users_from_message(response)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "17ec8ffb",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sample_bot = SlackBot()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "19269fd2",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Add some sample messages\n",
+    "sample_bot.add_message(\"@kevin.nguyen\", \"Vector search databases are the future for LLMs. They enable the growth and optimization of queries\")\n",
+    "sample_bot.add_message(\"@ryan.michael\", \"Kaskada with DataStax enables faster-streaming LLMs than traditional lang-chain models\")\n",
+    "sample_bot.add_message(\"@eric.pinzur\", \"Helm charts are how we should deploy the future architecture of microservices\")\n",
+    "sample_bot.add_message(\"@ben.chambers\", \"Python FFIs and Rust compilation give us a much better experience than our current implementation\")\n",
+    "sample_bot.add_message(\"@jordan.frazier\", \"here’s a list in type inference and index support\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6bbc6362",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sample_bot.query(\"FFIs\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9b85475b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sample_bot.query(\"The database I am using is a vector based implementation derived from Cassandra on Astra. There appears to be a problem with the scale.\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b81e8cdf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sample_bot.query(\"Vector search databases\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6fe14d01",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sample_bot.query(\"I want to know more about Kaskada's ML\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5b465e2b",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sample_bot.query(\"How do I onboard?\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b7196d75",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "sample_bot.query(\"Awkward Tacos\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "587b6a70",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Example from the last few messages from the Slack Kaskada Eng channel\n",
+    "# https://datastax.slack.com/archives/C04J75DMUSG/p1690824490676389\n",
+    "kaskada_eng_bot = SlackBot()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "3caa4554",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Add all the messages from the recent thread\n",
+    "kaskada_eng_bot.add_message(\"@ben.chambers\", \"If yes: then since count(E) ignores null elements, then collect(E) needs to ignore elements\")\n",
+    "kaskada_eng_bot.add_message(\"@ben.chambers\", \"count(E) == len(collect(E, max=null)) <- should this be true?\")\n",
+    "kaskada_eng_bot.add_message(\"@jordan.frazier\", \"(i.e. @Ryan Michael Should collect() collect null values into the list?\")\n",
+    "kaskada_eng_bot.add_message(\"@ryan.michael\", \"That’s an interesting question\")\n",
+    "kaskada_eng_bot.add_message(\"@ben.chambers\", \"It’s also interesting, because right now most aggregations produce null if they haven’t seen any non-null values. But count produces 0 and collect will produce the empty list. So it feels like we may want a relationship between them.\")\n",
+    "kaskada_eng_bot.add_message(\"@ben.chambers\", '''That’s also nice because it lets us do something like:\n",
+    "E.value | if(E.interesting) | collect(max=10)\n",
+    "To collect “10 interesting thnigs”''')\n",
+    "kaskada_eng_bot.add_message(\"@ben.chambers\", '''And we can always put a null value in a non-null struct:\n",
+    "({ E.maybe_null } | collect(max=10)).maybe_null if we want to collect 10 potentially-null values.''')\n",
+    "kaskada_eng_bot.add_message(\"@jordan.frazier\", '''count produces 0 if it only sees null values (since it doesn’t count null).\n",
+    "len(collect()) produces null if it skips null values (contradictory — count(E) != len(collect(E))''')\n",
+    "kaskada_eng_bot.add_message(\"@ben.chambers\", '''Why? Why not have it produces an empty list if it hasn’t seen any values? It doesn’t take any space, and makes it relate to count?''')\n",
+    "kaskada_eng_bot.add_message(\"@jordan.frazier\", '''That’s right — I was thinking of “skipping” as “ignoring” the input entirely, but that doesn’t make sense. If it sees a null it will either produce the current list or the empty list if none exists''')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e195c523",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "last_message_on_thread = '''If anybody wants to comment (maybe @Brian Godsey), I added the question to the doc.'''"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "07791f6d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "kaskada_eng_bot.query(last_message_on_thread)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d193bdf5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "kaskada_eng_bot.query(\"I think skipping the entire input is necessary.\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "9e1a9a62",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "kaskada_eng_bot.query(\"another random octopus tacos vector me search\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f9e4e8d6",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "markdown",
+   "id": "68545536",
+   "metadata": {},
+   "source": [
+    "1. Providing the list of users is not a scalable approach\n",
+    "2. We are not going to provide the whole chat history (this is what fine-tuning is for)\n",
+    "3. Do we want to allow the model to choose whether or not to return a person or an empty set? Should someone always be notified? \"Only return a name if you're very confident\"\n",
+    "4. What is the validation metric, and under what conditions do we create training examples? E.g. if we know a specific history resulted in a choice, then we can rank it. We also need to recognize whether there are important people involved or the model just doesn't know."
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.16"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/examples/slackbot/README.md b/examples/slackbot/README.md
new file mode 100644
index 000000000..ee26b707b
--- /dev/null
+++ b/examples/slackbot/README.md
@@ -0,0 +1,51 @@
+# SlackBot Example
+
+## Data Prep
+
+```sql
+copy(
+    select
+        type ,
+        subtype ,
+        to_timestamp(cast(ts as bigint)) as ts,
+        user ,
+        text ,
+        -- client_msg_id ,
+        -- blocks ,
+        team ,
+        user_team ,
+        source_team ,
+        user_profile ,
+        inviter ,
+        edited ,
+        reactions ,
+        thread_ts ,
+        reply_count ,
+        reply_users_count ,
+        latest_reply ,
+        -- reply_users ,
+        -- replies ,
+        is_locked ,
+        subscribed ,
+        last_read ,
+        parent_user_id ,
+        regexp_extract(filename, 'data/iloveai-initial-export/(.+)/\d{4}-\d{2}-\d{2}.json', 1) as channel
+    from (
+        select * from read_json_auto('data/iloveai-initial-export/*/*.json',
+            format='array',
+            filename=true,
+            union_by_name=true)
+    )
+) to 'messages.parquet' (FORMAT PARQUET)
+;
+
+    select *
+    from (
+        select * from read_json_auto('data/iloveai-initial-export/*/*.json',
+            format='array',
+            filename=true,
+            union_by_name=true)
+    )
+    limit 10
+;
+```
\ No newline at end of file
diff --git a/examples/slackbot/data/iloveai-initial-export/canvases.json b/examples/slackbot/data/iloveai-initial-export/canvases.json
new file mode 100644
index 000000000..4ae772191
--- /dev/null
+++ b/examples/slackbot/data/iloveai-initial-export/canvases.json
@@ -0,0 +1,3 @@
+[
+""
+]
\ No newline at end of file
diff --git a/examples/slackbot/data/iloveai-initial-export/channels.json b/examples/slackbot/data/iloveai-initial-export/channels.json
new file mode 100644
index 000000000..157d71c4e
--- /dev/null
+++ b/examples/slackbot/data/iloveai-initial-export/channels.json
@@ -0,0 +1,71 @@
+[
+{
+    "id": "C05JMQ6PJ2Z",
+    "name": "demo",
+    "created": 1690314266,
+    "creator": "U05JQJJDJ6P",
+    "is_archived": false,
+    "is_general": false,
+    "members": [
+        "U05JH8BCZST",
+        "U05JQJJDJ6P",
+        "U05JV3K9RB7"
+    ],
+    "topic": {
+        "value": "",
+        "creator": "",
+        "last_set": 0
+    },
+    "purpose": {
+        "value": "This channel is for everything #demo. Hold meetings, share docs, and make decisions together with your team.",
+        "creator": "U05JQJJDJ6P",
+        "last_set": 1690314266
+    }
+},
+{
+    "id": "C05K3B1V717",
+    "name": "random",
+    "created": 1690314133,
+    "creator": "U05JQJJDJ6P",
+    "is_archived": false,
+    "is_general": false,
+    "members": [
+        "U05JH8BCZST",
+        "U05JQJJDJ6P",
+        "U05JV3K9RB7"
+    ],
+    "topic": {
+        "value": "",
+        "creator": "",
+        "last_set": 0
+    },
+    "purpose": {
+        "value": "This channel is for... well, everything else. It’s a place for team jokes, spur-of-the-moment ideas, and funny GIFs. Go wild!",
+        "creator": "U05JQJJDJ6P",
+        "last_set": 1690314133
+    }
+},
+{
+    "id": "C05KED4JDNC",
+    "name": "general",
+    "created": 1690314132,
+    "creator": "U05JQJJDJ6P",
+    "is_archived": false,
+    "is_general": true,
+    "members": [
+        "U05JH8BCZST",
+        "U05JQJJDJ6P",
+        "U05JV3K9RB7"
+    ],
+    "topic": {
+        "value": "",
+        "creator": "",
+        "last_set": 0
+    },
+    "purpose": {
+        "value": "This is the one channel that will always include everyone. 
It’s a great spot for announcements and team-wide conversations.", + "creator": "U05JQJJDJ6P", + "last_set": 1690314132 + } +} +] \ No newline at end of file diff --git a/examples/slackbot/data/iloveai-initial-export/demo/2023-07-25.json b/examples/slackbot/data/iloveai-initial-export/demo/2023-07-25.json new file mode 100644 index 000000000..20e37a16a --- /dev/null +++ b/examples/slackbot/data/iloveai-initial-export/demo/2023-07-25.json @@ -0,0 +1,9 @@ +[ + { + "type": "message", + "subtype": "channel_join", + "ts": "1690314266.912869", + "user": "U05JQJJDJ6P", + "text": "<@U05JQJJDJ6P> has joined the channel" + } +] \ No newline at end of file diff --git a/examples/slackbot/data/iloveai-initial-export/demo/2023-07-26.json b/examples/slackbot/data/iloveai-initial-export/demo/2023-07-26.json new file mode 100644 index 000000000..de0ca7156 --- /dev/null +++ b/examples/slackbot/data/iloveai-initial-export/demo/2023-07-26.json @@ -0,0 +1,62 @@ +[ + { + "client_msg_id": "515ba80e-a0d5-482f-bb16-3eda5a0ef153", + "type": "message", + "text": "old message in demo channel", + "user": "U05JQJJDJ6P", + "ts": "1690360209.651159", + "blocks": [ + { + "type": "rich_text", + "block_id": "Egod", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "old message in demo channel" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "g18ebd8787ba", + "image_72": "https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-72.png", + "first_name": "eric", + "real_name": "eric", + "display_name": "", + "team": "T05JA5XCR9D", + "name": "eric.pinzur", + "is_restricted": false, + "is_ultra_restricted": false + } + }, + { + "type": "message", + "subtype": "channel_join", + "ts": "1690365352.090379", + "user": "U05K6LQRDKK", + "text": "<@U05K6LQRDKK> has joined the channel", + "inviter": "U05JQJJDJ6P" + }, + { + "type": "message", + "subtype": "channel_join", + "ts": "1690400220.366319", + "user": "U05JH8BCZST", + "text": "<@U05JH8BCZST> has joined the channel" + }, + { + "type": "message", + "subtype": "channel_join", + "ts": "1690402590.304289", + "user": "U05JV3K9RB7", + "text": "<@U05JV3K9RB7> has joined the channel" + } +] \ No newline at end of file diff --git a/examples/slackbot/data/iloveai-initial-export/general/2023-07-25.json b/examples/slackbot/data/iloveai-initial-export/general/2023-07-25.json new file mode 100644 index 000000000..62bc62c7f --- /dev/null +++ b/examples/slackbot/data/iloveai-initial-export/general/2023-07-25.json @@ -0,0 +1,9 @@ +[ + { + "type": "message", + "subtype": "channel_join", + "ts": "1690314133.159559", + "user": "U05JQJJDJ6P", + "text": "<@U05JQJJDJ6P> has joined the channel" + } +] \ No newline at end of file diff --git a/examples/slackbot/data/iloveai-initial-export/general/2023-07-26.json b/examples/slackbot/data/iloveai-initial-export/general/2023-07-26.json new file mode 100644 index 000000000..dd779c5ef --- /dev/null +++ b/examples/slackbot/data/iloveai-initial-export/general/2023-07-26.json @@ -0,0 +1,227 @@ +[ + { + "client_msg_id": "1403fc2b-6e15-4a71-a865-91693333bf42", + "type": "message", + "text": "old message 1", + "user": "U05JQJJDJ6P", + "ts": "1690360175.262899", + "blocks": [ + { + "type": "rich_text", + "block_id": "C9F", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "old 
message 1" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "g18ebd8787ba", + "image_72": "https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-72.png", + "first_name": "eric", + "real_name": "eric", + "display_name": "", + "team": "T05JA5XCR9D", + "name": "eric.pinzur", + "is_restricted": false, + "is_ultra_restricted": false + } + }, + { + "client_msg_id": "7654a13d-b6ec-44a2-b915-4124d03d49e7", + "type": "message", + "text": "old message 2", + "user": "U05JQJJDJ6P", + "ts": "1690360176.582019", + "blocks": [ + { + "type": "rich_text", + "block_id": "2Nl", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "old message 2" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "g18ebd8787ba", + "image_72": "https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-72.png", + "first_name": "eric", + "real_name": "eric", + "display_name": "", + "team": "T05JA5XCR9D", + "name": "eric.pinzur", + "is_restricted": false, + "is_ultra_restricted": false + }, + "edited": { + "user": "U05JQJJDJ6P", + "ts": "1690360188.000000" + } + }, + { + "type": "message", + "subtype": "channel_join", + "ts": "1690365335.003959", + "user": "U05K6LQRDKK", + "text": "<@U05K6LQRDKK> has joined the channel", + "inviter": "U05JQJJDJ6P" + }, + { + "type": "message", + "subtype": "channel_join", + "ts": "1690400220.249569", + "user": "U05JH8BCZST", + "text": "<@U05JH8BCZST> has joined the channel" + }, + { + "type": "message", + "subtype": "channel_join", + "ts": "1690402590.070869", + "user": "U05JV3K9RB7", + "text": "<@U05JV3K9RB7> has joined the channel" + }, + { + "client_msg_id": "133f7576-ea9b-4124-b338-e7ac1fb06a06", + "type": "message", + "text": "new message 1", + "user": "U05JV3K9RB7", + "ts": "1690402705.256229", + "blocks": [ + { + "type": "rich_text", + "block_id": "crT2", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "new message 1" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "356a52f83d46", + "image_72": "https:\/\/avatars.slack-edge.com\/2023-07-26\/5644057970994_356a52f83d46cdf189f1_72.jpg", + "first_name": "Charna", + "real_name": "Charna Parkey", + "display_name": "Charna Parkey", + "team": "T05JA5XCR9D", + "name": "charna.parkey", + "is_restricted": false, + "is_ultra_restricted": false + }, + "reactions": [ + { + "name": "eyes", + "users": [ + "U05JQJJDJ6P" + ], + "count": 1 + } + ] + }, + { + "client_msg_id": "1b9c702d-19d8-4cb3-852d-60a0b9b0871f", + "type": "message", + "text": "Hello Charna, welcome to Slack!", + "user": "U05JH8BCZST", + "ts": "1690403464.272689", + "blocks": [ + { + "type": "rich_text", + "block_id": "V9d3V", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "Hello Charna, welcome to Slack!" 
+ } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "b2cae8d05d70", + "image_72": "https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_72.png", + "first_name": "Ryan", + "real_name": "Ryan Michael", + "display_name": "Ryan Michael", + "team": "T05JA5XCR9D", + "name": "ryan.michael", + "is_restricted": false, + "is_ultra_restricted": false + } + }, + { + "client_msg_id": "a5cd4f42-e136-48e9-a54d-62398855e64f", + "type": "message", + "text": "Feel free to ask any questions about Kaskada you may have!", + "user": "U05JH8BCZST", + "ts": "1690403478.876699", + "blocks": [ + { + "type": "rich_text", + "block_id": "z2Q", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "Feel free to ask any questions about Kaskada you may have!" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "b2cae8d05d70", + "image_72": "https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_72.png", + "first_name": "Ryan", + "real_name": "Ryan Michael", + "display_name": "Ryan Michael", + "team": "T05JA5XCR9D", + "name": "ryan.michael", + "is_restricted": false, + "is_ultra_restricted": false + } + } +] \ No newline at end of file diff --git a/examples/slackbot/data/iloveai-initial-export/integration_logs.json b/examples/slackbot/data/iloveai-initial-export/integration_logs.json new file mode 100644 index 000000000..41d9defd5 --- /dev/null +++ b/examples/slackbot/data/iloveai-initial-export/integration_logs.json @@ -0,0 +1,6 @@ +[ +{"user_id":"U05JQJJDJ6P","user_name":"eric.pinzur","date":"1690443618","change_type":"removed","reason":"user_uninstalled_app","app_type":"OpenSourceAirbyteConnection","app_id":"A05JDF4JY7R"}, +{"user_id":"U05JQJJDJ6P","user_name":"eric.pinzur","date":"1690365230","change_type":"added","app_type":"OpenSourceAirbyteConnection","app_id":"A05JDF4JY7R"}, +{"user_id":0,"user_name":"slack","date":"1690365043","change_type":"app_collaborator_added","app_type":"OpenSourceAirbyteConnection","app_id":"A05JDF4JY7R"}, +{"user_id":"U05JQJJDJ6P","user_name":"eric.pinzur","date":"1690360092","change_type":"added","app_type":"Airbyte","app_id":"A02FQL8CHPH","scope":"read,identify"} +] \ No newline at end of file diff --git a/examples/slackbot/data/iloveai-initial-export/random/2023-07-25.json b/examples/slackbot/data/iloveai-initial-export/random/2023-07-25.json new file mode 100644 index 000000000..56c378f61 --- /dev/null +++ b/examples/slackbot/data/iloveai-initial-export/random/2023-07-25.json @@ -0,0 +1,9 @@ +[ + { + "type": "message", + "subtype": "channel_join", + "ts": "1690314133.550289", + "user": "U05JQJJDJ6P", + "text": "<@U05JQJJDJ6P> has joined the channel" + } +] \ No newline at end of file diff --git a/examples/slackbot/data/iloveai-initial-export/random/2023-07-26.json b/examples/slackbot/data/iloveai-initial-export/random/2023-07-26.json new file mode 100644 index 000000000..6dcf7b056 --- /dev/null +++ b/examples/slackbot/data/iloveai-initial-export/random/2023-07-26.json @@ -0,0 +1,302 @@ +[ + { + "client_msg_id": "591fec0d-bdfb-474b-8978-77a54893f62b", + "type": "message", + "text": "old message in random channel", + "user": "U05JQJJDJ6P", + "ts": "1690360213.550579", + "blocks": [ + { + "type": "rich_text", + "block_id": "jeu", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { 
+ "type": "text", + "text": "old message in random channel" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "g18ebd8787ba", + "image_72": "https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-72.png", + "first_name": "eric", + "real_name": "eric", + "display_name": "", + "team": "T05JA5XCR9D", + "name": "eric.pinzur", + "is_restricted": false, + "is_ultra_restricted": false + }, + "edited": { + "user": "U05JQJJDJ6P", + "ts": "1690360225.000000" + } + }, + { + "client_msg_id": "0cd6b256-a19f-4038-8c7f-a054f64e71eb", + "type": "message", + "text": "old thread in random channel", + "user": "U05JQJJDJ6P", + "ts": "1690360240.229079", + "blocks": [ + { + "type": "rich_text", + "block_id": "bQ8", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "old thread in random channel" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "g18ebd8787ba", + "image_72": "https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-72.png", + "first_name": "eric", + "real_name": "eric", + "display_name": "", + "team": "T05JA5XCR9D", + "name": "eric.pinzur", + "is_restricted": false, + "is_ultra_restricted": false + }, + "thread_ts": "1690360240.229079", + "reply_count": 4, + "reply_users_count": 2, + "latest_reply": "1690400587.318779", + "reply_users": [ + "U05JQJJDJ6P", + "U05JH8BCZST" + ], + "replies": [ + { + "user": "U05JQJJDJ6P", + "ts": "1690360249.137789" + }, + { + "user": "U05JQJJDJ6P", + "ts": "1690360253.041239" + }, + { + "user": "U05JQJJDJ6P", + "ts": "1690360257.375049" + }, + { + "user": "U05JH8BCZST", + "ts": "1690400587.318779" + } + ], + "is_locked": false, + "subscribed": true, + "last_read": "1690400587.318779" + }, + { + "client_msg_id": "9c0ca20a-1ba2-4a77-8ae6-0a0df6e1b170", + "type": "message", + "text": "reply 1", + "user": "U05JQJJDJ6P", + "ts": "1690360249.137789", + "blocks": [ + { + "type": "rich_text", + "block_id": "RmwzB", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "reply 1" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "g18ebd8787ba", + "image_72": "https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-72.png", + "first_name": "eric", + "real_name": "eric", + "display_name": "", + "team": "T05JA5XCR9D", + "name": "eric.pinzur", + "is_restricted": false, + "is_ultra_restricted": false + }, + "thread_ts": "1690360240.229079", + "parent_user_id": "U05JQJJDJ6P" + }, + { + "client_msg_id": "39f61ac4-158f-455a-96d7-95620f56316e", + "type": "message", + "text": "reply 2", + "user": "U05JQJJDJ6P", + "ts": "1690360253.041239", + "blocks": [ + { + "type": "rich_text", + "block_id": "OAk", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "reply 2" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "g18ebd8787ba", + "image_72": 
"https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-72.png", + "first_name": "eric", + "real_name": "eric", + "display_name": "", + "team": "T05JA5XCR9D", + "name": "eric.pinzur", + "is_restricted": false, + "is_ultra_restricted": false + }, + "thread_ts": "1690360240.229079", + "parent_user_id": "U05JQJJDJ6P" + }, + { + "client_msg_id": "74b7d4b6-664c-48f1-b751-246ed6ffc571", + "type": "message", + "text": "reply 3", + "user": "U05JQJJDJ6P", + "ts": "1690360257.375049", + "blocks": [ + { + "type": "rich_text", + "block_id": "B+5ba", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "reply 3" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "g18ebd8787ba", + "image_72": "https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-72.png", + "first_name": "eric", + "real_name": "eric", + "display_name": "", + "team": "T05JA5XCR9D", + "name": "eric.pinzur", + "is_restricted": false, + "is_ultra_restricted": false + }, + "thread_ts": "1690360240.229079", + "parent_user_id": "U05JQJJDJ6P" + }, + { + "type": "message", + "subtype": "channel_join", + "ts": "1690365307.466529", + "user": "U05K6LQRDKK", + "text": "<@U05K6LQRDKK> has joined the channel", + "inviter": "U05JQJJDJ6P" + }, + { + "type": "message", + "subtype": "channel_join", + "ts": "1690400220.312529", + "user": "U05JH8BCZST", + "text": "<@U05JH8BCZST> has joined the channel" + }, + { + "client_msg_id": "92cc8463-bd49-47e5-8501-2cc30ef12efd", + "type": "message", + "text": "howdy", + "user": "U05JH8BCZST", + "ts": "1690400587.318779", + "blocks": [ + { + "type": "rich_text", + "block_id": "2Go", + "elements": [ + { + "type": "rich_text_section", + "elements": [ + { + "type": "text", + "text": "howdy" + } + ] + } + ] + } + ], + "team": "T05JA5XCR9D", + "user_team": "T05JA5XCR9D", + "source_team": "T05JA5XCR9D", + "user_profile": { + "avatar_hash": "b2cae8d05d70", + "image_72": "https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_72.png", + "first_name": "Ryan", + "real_name": "Ryan Michael", + "display_name": "Ryan Michael", + "team": "T05JA5XCR9D", + "name": "ryan.michael", + "is_restricted": false, + "is_ultra_restricted": false + }, + "thread_ts": "1690360240.229079", + "parent_user_id": "U05JQJJDJ6P", + "reactions": [ + { + "name": "+1", + "users": [ + "U05JQJJDJ6P" + ], + "count": 1 + } + ] + }, + { + "type": "message", + "subtype": "channel_join", + "ts": "1690402590.237779", + "user": "U05JV3K9RB7", + "text": "<@U05JV3K9RB7> has joined the channel" + } +] \ No newline at end of file diff --git a/examples/slackbot/data/iloveai-initial-export/users.json b/examples/slackbot/data/iloveai-initial-export/users.json new file mode 100644 index 000000000..8546ad9a7 --- /dev/null +++ b/examples/slackbot/data/iloveai-initial-export/users.json @@ -0,0 +1,6 @@ +[ +{"id":"U05JH8BCZST","team_id":"T05JA5XCR9D","name":"ryan.michael","deleted":false,"color":"e7392d","real_name":"Ryan Michael","tz":"America\/New_York","tz_label":"Eastern Daylight Time","tz_offset":-14400,"profile":{"title":"","phone":"","skype":"","real_name":"Ryan Michael","real_name_normalized":"Ryan Michael","display_name":"Ryan Michael","display_name_normalized":"Ryan 
Michael","fields":{},"status_text":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"avatar_hash":"b2cae8d05d70","image_original":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_original.png","is_custom_image":true,"email":"ryan.michael@datastax.com","first_name":"Ryan","last_name":"Michael","image_24":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_24.png","image_32":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_32.png","image_48":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_48.png","image_72":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_72.png","image_192":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_192.png","image_512":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_512.png","image_1024":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5646266959572_b2cae8d05d70db19c666_1024.png","status_text_canonical":"","team":"T05JA5XCR9D"},"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"is_bot":false,"is_app_user":false,"updated":1690400224,"is_email_confirmed":true,"who_can_share_contact_card":"EVERYONE"}, +{"id":"U05JQJJDJ6P","team_id":"T05JA5XCR9D","name":"eric.pinzur","deleted":false,"color":"9f69e7","real_name":"eric","tz":"Europe\/Amsterdam","tz_label":"Central European Summer Time","tz_offset":7200,"profile":{"title":"","phone":"","skype":"","real_name":"eric","real_name_normalized":"eric","display_name":"","display_name_normalized":"","fields":{},"status_text":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"avatar_hash":"g18ebd8787ba","email":"eric.pinzur@datastax.com","first_name":"eric","last_name":"","image_24":"https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=24&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-24.png","image_32":"https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=32&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-32.png","image_48":"https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=48&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-48.png","image_72":"https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-72.png","image_192":"https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=192&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-192.png","image_512":"https:\/\/secure.gravatar.com\/avatar\/18ebd8787bac254cb250ef312cb79868.jpg?s=512&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0012-512.png","status_text_canonical":"","team":"T05JA5XCR9D"},"is_admin":true,"is_owner":true,"is_primary_owner":true,"is_restricted":false,"is_ultra_restricted":false,"is_bot":false,"is_app_user":false,"updated":1690314223,"is_email_confirmed":true,"who_can_share_contact_card":"EVERYONE"}, +{"id":"U05JV3K9RB7","team_id":"T05JA5XCR9D","name":"charna.parkey","deleted":false,"color":"3c989f","real_name":"Charna Parkey","tz":"America\/Los_Angeles","tz_label":"Pacific Daylight Time","tz_offset":-25200,"profile":{"title":"","phone":"","skype":"","real_name":"Charna Parkey","real_name_normalized":"Charna Parkey","display_name":"Charna 
Parkey","display_name_normalized":"Charna Parkey","fields":{},"status_text":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"avatar_hash":"356a52f83d46","image_original":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5644057970994_356a52f83d46cdf189f1_original.jpg","is_custom_image":true,"email":"charna.parkey@datastax.com","first_name":"Charna","last_name":"Parkey","image_24":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5644057970994_356a52f83d46cdf189f1_24.jpg","image_32":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5644057970994_356a52f83d46cdf189f1_32.jpg","image_48":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5644057970994_356a52f83d46cdf189f1_48.jpg","image_72":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5644057970994_356a52f83d46cdf189f1_72.jpg","image_192":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5644057970994_356a52f83d46cdf189f1_192.jpg","image_512":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5644057970994_356a52f83d46cdf189f1_512.jpg","image_1024":"https:\/\/avatars.slack-edge.com\/2023-07-26\/5644057970994_356a52f83d46cdf189f1_1024.jpg","status_text_canonical":"","team":"T05JA5XCR9D"},"is_admin":false,"is_owner":false,"is_primary_owner":false,"is_restricted":false,"is_ultra_restricted":false,"is_bot":false,"is_app_user":false,"updated":1690402592,"is_email_confirmed":true,"who_can_share_contact_card":"EVERYONE"}, +{"id":"U05K6LQRDKK","team_id":"T05JA5XCR9D","name":"opensourceairbyteconn","deleted":true,"profile":{"title":"","phone":"","skype":"","real_name":"OpenSourceAirbyteConnection","real_name_normalized":"OpenSourceAirbyteConnection","display_name":"","display_name_normalized":"","fields":{},"status_text":"","status_emoji":"","status_emoji_display_info":[],"status_expiration":0,"avatar_hash":"g31fd8a5943d","api_app_id":"A05JDF4JY7R","always_active":false,"bot_id":"B05KHNTF6GG","first_name":"OpenSourceAirbyteConnection","last_name":"","image_24":"https:\/\/secure.gravatar.com\/avatar\/31fd8a5943d5ab58baa952638d07c06b.jpg?s=24&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0016-24.png","image_32":"https:\/\/secure.gravatar.com\/avatar\/31fd8a5943d5ab58baa952638d07c06b.jpg?s=32&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0016-32.png","image_48":"https:\/\/secure.gravatar.com\/avatar\/31fd8a5943d5ab58baa952638d07c06b.jpg?s=48&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0016-48.png","image_72":"https:\/\/secure.gravatar.com\/avatar\/31fd8a5943d5ab58baa952638d07c06b.jpg?s=72&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0016-72.png","image_192":"https:\/\/secure.gravatar.com\/avatar\/31fd8a5943d5ab58baa952638d07c06b.jpg?s=192&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0016-192.png","image_512":"https:\/\/secure.gravatar.com\/avatar\/31fd8a5943d5ab58baa952638d07c06b.jpg?s=512&d=https%3A%2F%2Fa.slack-edge.com%2Fdf10d%2Fimg%2Favatars%2Fava_0016-512.png","status_text_canonical":"","team":"T05JA5XCR9D"},"is_bot":true,"is_app_user":false,"updated":1690443618} +] \ No newline at end of file diff --git a/examples/slackbot/messages.parquet b/examples/slackbot/messages.parquet new file mode 100644 index 000000000..a3ad8a5ac Binary files /dev/null and b/examples/slackbot/messages.parquet differ diff --git a/examples/slackbot/slackbot.py b/examples/slackbot/slackbot.py new file mode 100644 index 000000000..094b6d68d --- /dev/null +++ b/examples/slackbot/slackbot.py @@ -0,0 +1,157 @@ +import json, math, openai, os, pyarrow +from slack_sdk.web import 
WebClient
+from slack_sdk.socket_mode import SocketModeClient
+from slack_sdk.socket_mode.response import SocketModeResponse
+import sparrow_pi as kt
+
+def build_conversation(messages):
+    message_time = messages.col("ts")
+    last_message_time = message_time.lag(1) # !!!
+    is_new_conversation = message_time.seconds_since(last_message_time) > 10 * 60
+
+    return messages \
+        .select("user", "ts", "text", "reactions") \
+        .collect(window=kt.windows.Since(is_new_conversation), max=100)
+
+def build_examples(messages):
+    duration = kt.minutes(5) # !!!
+
+    conversation = build_conversation(messages)
+    shifted_conversation = conversation.shift_by(duration) # !!!
+
+    reaction_users = conversation.col("reactions").col("name").collect(kt.windows.Trailing(duration)).flatten() # !!!
+    participating_users = conversation.col("user").collect(kt.windows.Trailing(duration)) # !!!
+    engaged_users = kt.union(reaction_users, participating_users) # !!!
+
+    return kt.record({ "prompt": shifted_conversation, "completion": engaged_users}) \
+        .filter(shifted_conversation.is_not_null())
+
+def format_prompt(prompt):
+    return "start -> " + "\n\n".join([f' {msg["user"]} --> {msg["text"]} ' for msg in prompt]) + "\n\n###\n\n"
+
+def main():
+    output_map = {}
+
+    with open('./user_output_map.json', 'r') as file:
+        output_map = json.load(file)
+
+    print(f'Loaded output map: {output_map}')
+
+    # Initialize Kaskada with a local execution context.
+    kt.init_session()
+
+    # Initialize OpenAI
+    openai.api_key = os.environ.get("OPEN_AI_KEY")
+
+    # Initialize Slack
+    slack = SocketModeClient(
+        app_token=os.environ.get("SLACK_APP_TOKEN"),
+        web_client=WebClient(token=os.environ.get("SLACK_BOT_TOKEN"))
+    )
+
+    min_prob_for_response = 0.50
+
+    # Receive Slack messages in real-time
+    live_messages = kt.sources.read_stream(entity_column="channel", time_column="ts")
+
+    # Receive messages from Slack
+    def handle_message(client, req):
+        # Acknowledge the message back to Slack
+        client.send_socket_mode_response(SocketModeResponse(envelope_id=req.envelope_id))
+
+        if req.type == "events_api" and "event" in req.payload:
+            e = req.payload["event"]
+
+            print(f'Received event from slack websocket: {e}')
+
+            # ignore message edit, delete, reaction events
+            if "previous_message" in e or e["type"] == "reaction_added":
+                return
+
+            print(f'Sending message event to kaskada: {e}')
+
+            # Deliver the message to Kaskada
+            live_messages.add_event(pyarrow.json.read_json(e))
+
+    slack.socket_mode_request_listeners.append(handle_message)
+    slack.connect()
+
+    # Handle messages in realtime
+    # A "conversation" is a list of messages
+    for conversation in build_conversation(live_messages).start().to_generator():
+        if len(conversation) == 0:
+            continue
+
+        print(f'Starting completion on conversation with first message text: {conversation[0]["text"]}')
+
+        prompt = format_prompt(conversation)
+
+        print(f'Using prompt: {prompt}')
+
+        # Ask the model who should be notified
+        res = openai.Completion.create(
+            model="davinci:ft-personal:coversation-users-full-kaskada-2023-08-05-14-25-30",
+            prompt=prompt,
+            logprobs=5,
+            max_tokens=1,
+            stop=" end",
+            temperature=0,
+        )
+
+        print(f'Received completion response: {res}')
+
+        users = []
+        logprobs = res["choices"][0]["logprobs"]["top_logprobs"][0]
+
+        print(f'Found logprobs: {logprobs}')
+        for user in logprobs:
+            if math.exp(logprobs[user]) > min_prob_for_response:
+                user = user.strip()
+                # if users include `nil`, stop processing
+                if user == "nil":
+                    users = []
+                    break
+                users.append(user)
+
+        print(f'Found users to alert: {users}')
+
+        # alert on most recent message in conversation
+        msg = conversation.pop()
+
+        # Send notification to users
+        for user_num in users:
+            if user_num not in output_map:
+                print(f'User: {user_num} not in output_map, stopping.')
+                continue
+
+            user_id = output_map[user_num]
+
+            print(f'Found user {user_num} in output map: {user_id}')
+
+            app = slack.web_client.users_conversations(
+                types="im",
+                user=user_id,
+            )
+            if len(app["channels"]) == 0:
+                print(f'User: {user_id} hasn\'t installed the slackbot yet')
+                continue
+
+            app_channel = app["channels"][0]["id"]
+            print(f'Got user\'s slackbot channel id: {app_channel}')
+
+            link = slack.web_client.chat_getPermalink(
+                channel=msg["channel"],
+                message_ts=msg["ts"],
+            )["permalink"]
+
+            print(f'Got message link: {link}')
+
+            slack.web_client.chat_postMessage(
+                channel=app_channel,
+                text=f'You may be interested in this conversation: <{link}|{msg["text"]}>'
+            )
+
+            print('Posted alert message')
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/examples/slackbot/user_output_map.json b/examples/slackbot/user_output_map.json
new file mode 100644
index 000000000..c87cbe9f6
--- /dev/null
+++ b/examples/slackbot/user_output_map.json
@@ -0,0 +1 @@
+{"1": "U052Y3ZKGQJ", "2": "U052XUMJF6F", "4": "U052Y3Y23BL", "5": "U052RFMBRF0", "6": "U053MQ05DHN", "8": "U052RFM1QK0", "9": "U053AN5E281", "10": "U052HH8KE0P", "11": "U052RFHJ8EA", "13": "U0530HTBG2Y"}
\ No newline at end of file
diff --git a/licenses-report.txt b/licenses-report.txt
index 8e59eb834..1c3bcab9b 100644
--- a/licenses-report.txt
+++ b/licenses-report.txt
@@ -4,7 +4,7 @@
 (Apache-2.0 OR MIT) AND BSD-3-Clause (1): encoding_rs
 (MIT OR Apache-2.0) AND Unicode-DFS-2016 (1): unicode-ident
 0BSD OR Apache-2.0 OR MIT (1): adler
-Apache-2.0 (65): approx, arrow, arrow-arith, arrow-array, arrow-buffer, arrow-cast, arrow-csv, arrow-data, arrow-ipc, arrow-json, arrow-ord, arrow-row, arrow-schema, arrow-select, arrow-string, aws-config, aws-endpoint, aws-http, aws-sdk-s3, aws-sdk-sso, aws-sdk-sts, aws-sig-auth, aws-sigv4, aws-smithy-async, aws-smithy-client, aws-smithy-eventstream, aws-smithy-http, aws-smithy-http-tower, aws-smithy-json, aws-smithy-query, aws-smithy-types, aws-smithy-xml, aws-types, codespan-reporting, edit-distance, flatbuffers, openssl, opentelemetry, opentelemetry-otlp, opentelemetry-proto, opentelemetry_api, opentelemetry_sdk, parquet, prost, prost-derive, prost-types, prost-wkt, prost-wkt-types, rocksdb, similar, sparrow-api, sparrow-arrow, sparrow-catalog, sparrow-compiler, sparrow-core, sparrow-instructions, sparrow-kernels, sparrow-main, sparrow-plan, sparrow-qfr, sparrow-runtime, sparrow-syntax, sparrow-testing, sync_wrapper, thrift
+Apache-2.0 (65): approx, arrow, arrow-arith, arrow-array, arrow-buffer, arrow-cast, arrow-csv, arrow-data, arrow-ipc, arrow-json, arrow-ord, arrow-row, arrow-schema, arrow-select, arrow-string, aws-config, aws-endpoint, aws-http, aws-sdk-s3, aws-sdk-sso, aws-sdk-sts, aws-sig-auth, aws-sigv4, aws-smithy-async, aws-smithy-client, aws-smithy-eventstream, aws-smithy-http, aws-smithy-http-tower, aws-smithy-json, aws-smithy-query, aws-smithy-types, aws-smithy-xml, aws-types, codespan-reporting, edit-distance, flatbuffers, openssl, opentelemetry, opentelemetry-otlp, opentelemetry-proto, opentelemetry_api, opentelemetry_sdk, parquet, prost, prost-derive, prost-types, prost-wkt, prost-wkt-types, rocksdb, similar, sparrow-api, sparrow-arrow, sparrow-catalog, sparrow-compiler, sparrow-core, sparrow-instructions, 
sparrow-kernels, sparrow-main, sparrow-qfr, sparrow-runtime, sparrow-syntax, sparrow-testing, sync_wrapper, thrift Apache-2.0 OR Apache-2.0 WITH LLVM-exception OR MIT (5): io-lifetimes, linux-raw-sys, rustix, wasi, wasi Apache-2.0 OR BSD-3-Clause OR MIT (1): librocksdb-sys Apache-2.0 OR BSL-1.0 (1): ryu diff --git a/proto/kaskada/kaskada/v1alpha/destinations.proto b/proto/kaskada/kaskada/v1alpha/destinations.proto index b1d4f80d5..cdcc21960 100644 --- a/proto/kaskada/kaskada/v1alpha/destinations.proto +++ b/proto/kaskada/kaskada/v1alpha/destinations.proto @@ -8,9 +8,10 @@ import "kaskada/kaskada/v1alpha/pulsar.proto"; // Describes the destination results are materialized to. message Destination { + reserved 2; // old redis oneof destination + oneof destination { ObjectStoreDestination object_store = 1; - RedisDestination redis = 2; PulsarDestination pulsar = 3; } } @@ -39,54 +40,6 @@ message ObjectStoreDestination { } } -// Writes the results directly to a RedisAI instance. -// -// Uses a series of AI.TENSORSET operations -// -// The query expression's type must be a record. -// The record type must include a field named 'key'. -// The value of the 'key' field is used as the AI.TENSORSET key. -// All other fields must be numeric. -// -// See https://redis.io/topics/protocol -message RedisDestination { - // The hostname of the Redis instance. - string host_name = 1; - - // The port of the Redis instance. - int32 port = 2; - - // When `true`, TLS will be used to connect to Redis. - bool use_tls = 3; - - // The Redis database number 0 to 15. - int32 database_number = 4; - - // The password to connect to the Redis instance - string password = 5; - - // An X.509 certificate to use for authenticating the server - // to connected clients, masters or cluster peers. - // The string should be PEM formatted. - string tls_cert = 6; - - // An X.509 private key to use for authenticating the server - // to connected clients, masters or cluster peers. - // The string should be PEM formatted. - string tls_key = 7; - - // A PEM encoded CA's certificate. - string tls_ca_cert = 8; - - // InsecureSkipVerify controls whether a client verifies the - // server's certificate chain and host name. - // If this field is true, TLS accepts any certificate - // presented by the server and any host name in that certificate. - // In this mode, TLS is susceptible to man-in-the-middle attacks. - // This should be used only for testing. - bool insecure_skip_verify = 9; -} - message PulsarDestination { PulsarConfig config = 1; } diff --git a/proto/kaskada/kaskada/v1alpha/query_service.proto b/proto/kaskada/kaskada/v1alpha/query_service.proto index c8e70481e..f8d581550 100644 --- a/proto/kaskada/kaskada/v1alpha/query_service.proto +++ b/proto/kaskada/kaskada/v1alpha/query_service.proto @@ -69,22 +69,6 @@ message Query { int64 preview_rows = 1; } - message RedisBulkResponse { - // The tensor shape to output values. - // - // Exactly one dimension's value must be zero - this dimension's - // cardinality is determined by the number of output values. The - // number of output values must be a multiple of the product of - // the nonzero dimensions. 
- // - // Example: - // [0] - Column vector: [1,2,3,4,5,6] - // [1, 0] - Single row vector: [[1,2,3,4,5,6]] - // [2, 0] - tuple vectors: [[1,2], [3,4], [5,6]] - // [0, 2] - two row vectors: [[1,2,3], [4,5,6]] - repeated int32 shape = 1; - } - enum ResultBehavior { // Unspecified - Invalid Value RESULT_BEHAVIOR_UNSPECIFIED = 0; diff --git a/proto/kaskada/kaskada/v1alpha/schema.proto b/proto/kaskada/kaskada/v1alpha/schema.proto index c12ebe6bf..faa8cbd41 100644 --- a/proto/kaskada/kaskada/v1alpha/schema.proto +++ b/proto/kaskada/kaskada/v1alpha/schema.proto @@ -32,12 +32,18 @@ message DataType { google.protobuf.Empty window = 3; // A list of a different type. - DataType list = 4; + List list = 4; // A map type. Map map = 5; } + message List { + string name = 1; + DataType item_type = 2; + bool nullable = 3; + } + message Map { string name = 1; bool ordered = 2; diff --git a/proto/kaskada/kaskada/v2alpha/query_service.proto b/proto/kaskada/kaskada/v2alpha/query_service.proto index 4b235611d..efc8ba6e4 100644 --- a/proto/kaskada/kaskada/v2alpha/query_service.proto +++ b/proto/kaskada/kaskada/v2alpha/query_service.proto @@ -107,9 +107,6 @@ message QueryConfig { kaskada.v1alpha.Destination destination = 3; // Determines how results are returned. - // - // Note that for Destination -> RedisBulkResponse or RedisAI, the only - // valid option is `FinalResults` ResultBehavior result_behavior = 4; // Configure limits on the output set. @@ -140,12 +137,6 @@ message ParquetResults { repeated string paths = 1; } -message RedisBulkResults { - // URIs identifying the Redis Bulk files containing the query - // results. - repeated string paths = 1; -} - message QueryOutput { kaskada.v1alpha.FileResults file_results = 1; } diff --git a/python/.coveragerc b/python/.coveragerc new file mode 100644 index 000000000..6c31cc5cb --- /dev/null +++ b/python/.coveragerc @@ -0,0 +1,4 @@ +[report] +exclude_also = + # Don't complain about `__all__ = ...` + __all__ = \ No newline at end of file diff --git a/python/.darglint b/python/.darglint new file mode 100644 index 000000000..89956bb7a --- /dev/null +++ b/python/.darglint @@ -0,0 +1,5 @@ +[darglint] +strictness = short +docstring_style=numpy +ignore_regex=^(test)?_(.*), +message_template={path}:{line} in {obj}: {msg_id} {msg} \ No newline at end of file diff --git a/python/.flake8 b/python/.flake8 new file mode 100644 index 000000000..e88a55d99 --- /dev/null +++ b/python/.flake8 @@ -0,0 +1,7 @@ +[flake8] +select = B,B9,C,E,F,N,W +ignore = E203,E501,W503 +max-line-length = 100 +max-complexity = 10 +rst-roles = class,const,func,meth,mod,ref +rst-directives = deprecated \ No newline at end of file diff --git a/python/CHANGELOG.md b/python/CHANGELOG.md new file mode 100644 index 000000000..58d073d9b --- /dev/null +++ b/python/CHANGELOG.md @@ -0,0 +1,4870 @@ +# CHANGELOG + + + +## v0.6.0-a.1 (2023-08-21) + +### Ci + +* ci: fix conditions (#679) ([`beb5dda`](https://github.com/kaskada-ai/kaskada/commit/beb5dda39fb6a0f5b576bc643a214c74087bf19a)) + +* ci: fix conditions ([`56394a5`](https://github.com/kaskada-ai/kaskada/commit/56394a582551f3e924605d83772429cf75d813e4)) + +* ci: Stub out CI for new python package (#661) + +Initial stubs were generated by `maturin`. + +Updated (I think) to reflect the location in `python`. 
([`1d7ea51`](https://github.com/kaskada-ai/kaskada/commit/1d7ea511052fcae26e1f778c193c11ff8736bf43)) + +### Documentation + +* docs: Remove home page from menu (can click logo) (#680) ([`4046a05`](https://github.com/kaskada-ai/kaskada/commit/4046a0531230d0c4b92bcaae4aff3dfa80324ea5)) + +* docs: Stub more doc content (#668) + +Pulling in stuff from existing docs and past drafts. ([`1fb920f`](https://github.com/kaskada-ai/kaskada/commit/1fb920f282151c95e57c467dc7021b73fcaafa4d)) + +* docs: Update existing docs (#655) + +Tweak wording on a few pages. +Make sure all the methods are documented. + +Use poetry groups for dependencies in noxfile. ([`880936c`](https://github.com/kaskada-ai/kaskada/commit/880936c7528e44ab6620d6a57d1a3a1f3dc787ad)) + +* docs: some polishing / landing page work (#618) ([`6bd484c`](https://github.com/kaskada-ai/kaskada/commit/6bd484c19ef9a524e3aee1ae052ba5edfb6fc195)) + +* docs: Some doc tweaks (#610) + +Get the type alias to render, along with links. Rename the type alias to +`Literal` and use it just for the literals. ([`a1e1ca8`](https://github.com/kaskada-ai/kaskada/commit/a1e1ca86c4b456942dbcdf7859e43acdf0b0a45e)) + +### Feature + +* feat: add udf sparrow trait (#674) + +Adds the sparrow trait for user-defined functions. This will allow us to +both keep python dependencies in `python` crates and implement +evaluators from other `crates`. ([`c87108a`](https://github.com/kaskada-ai/kaskada/commit/c87108a17980691c34c17cf031fbfec998151d75)) + +* feat: Support plotting of Timestream objects (#673) ([`ac98795`](https://github.com/kaskada-ai/kaskada/commit/ac98795e8bdf4fd06fc357d7c8d7876725867e2c)) + +* feat: add other aggregations (#669) + +Adds the following aggregations: count_if, max, mean, min, stddev, and +variance ([`c755d60`](https://github.com/kaskada-ai/kaskada/commit/c755d601cbf3c88775bb7c996e4fce69baedd1ac)) + +* feat: Warn when using `==` or `!=` on timestreams (#676) ([`d51dbce`](https://github.com/kaskada-ai/kaskada/commit/d51dbce2b0ebdeaedf846d15dc55c9b07b3161b4)) + +* feat: add else call (#671) + +Adds an else call. ([`d07e8c4`](https://github.com/kaskada-ai/kaskada/commit/d07e8c47181913b341bc6dbafdb4a656f7e24c92)) + +* feat: Support floating point time columns (#664) + +Also allow specifying the time unit for int and float. ([`26464ac`](https://github.com/kaskada-ai/kaskada/commit/26464acc1016d5ceaa8ce275f510f3ccaaf46241)) + +* feat: add count aggregation (#660) + +Adds the count aggregation to the new python implementation. Adds +`count` in #662. ([`fffc1ac`](https://github.com/kaskada-ai/kaskada/commit/fffc1ac4efc4a29922ac821d1a4296c16ec165e9)) + +* feat: add seconds_since timestamp_ns (#652) ([`d3dba5d`](https://github.com/kaskada-ai/kaskada/commit/d3dba5d985a601bb131573521fee92947cd8d54b)) + +* feat: hack collect trailing with shift by (#658) + +Attempts to fix the behavior of trailing windows in collect by forcing a +merge with a shifted message to the next `duration`, which clears out +the window of the previous event. ([`0de6eab`](https://github.com/kaskada-ai/kaskada/commit/0de6eab8bdbf7473d7786917415c81b68b051852)) + +* feat: rename sparrow-py to kaskada (#659) + +- Initial version 0.6.0 is the next minor version since the most recent +kaskada release.
+- Move `/sparrow-py` to `/python` +- Change standard import from `import kaskada as kt` to `import kaskada +as kd` ([`9a9b793`](https://github.com/kaskada-ai/kaskada/commit/9a9b793f07ab7dee387b14cfbcfcbb74eb5404ec)) + +* feat: Lambdas for building records/extending (#653) ([`88957e7`](https://github.com/kaskada-ai/kaskada/commit/88957e7c9eb72cb7619845db645b77a640f0426f)) + +* feat: support trailing windows in all primitive collect evaluators (#649) + +I decided to just implement the trailing windows in the primitive types, +as it was easy. ([`5132817`](https://github.com/kaskada-ai/kaskada/commit/51328179c66c52ece2eaa38d5376650cf9b65e16)) + +* feat: use collect for lag in python builder (#635) ([`45deed2`](https://github.com/kaskada-ai/kaskada/commit/45deed2cae85abd065a159a36b994d95b7e3c9d1)) + +* feat: trailing windows in collect for `DataType::Struct` (#641) + +Only adds trailing windows for `DataType::Struct` in `collect()` +function. ([`15c0d14`](https://github.com/kaskada-ai/kaskada/commit/15c0d14ca6b6c785f42f4fe975bf429122d3fc2d)) + +* feat: make collect continuous by setting interpolation (#647) ([`98c3be3`](https://github.com/kaskada-ai/kaskada/commit/98c3be3979d09de1e3ee1b32fa7bb56a7c49edc1)) + +* feat: Run simplifications in sparrow-py (#645) + +I'm not sure why, but this seems to fix the issue applying `last` to +`with_key`. ([`55d8cf7`](https://github.com/kaskada-ai/kaskada/commit/55d8cf74dda6abe1d81dd6cfcb14305b490b177e)) + +* feat: Add union operation (#642) ([`2e1cf23`](https://github.com/kaskada-ai/kaskada/commit/2e1cf23f77afac5f0aa82108fb5f2b1047ca8ec2)) + +* feat: Cleanup and extend sources (#643) + +This drops the `Source` from the name, since we use them as +`kt.sources.CsvString`. + +This introduces `JsonlString` (for line-delimited JSON strings), +`Pandas` for pandas dataframes, `Parquet` for Parquet data, and `Pylist` +for lists of Python dicts. + +The schema can be specified in the constructor. This allows creating two +sources using the same schema, by creating one and using its schema +when creating the other. Additional data added to the source is +required to conform to the original schema. ([`f936b40`](https://github.com/kaskada-ai/kaskada/commit/f936b409c6e097c01788ca92aacf4bc7f26167f0)) + +* feat: latched spread fallback (#640) ([`65d0fa0`](https://github.com/kaskada-ai/kaskada/commit/65d0fa0232f368c5fb31f27dd834b044fd63e39f)) + +* feat: Update key hash inverses as new data arrives (#639) + +Previously, we froze the key hash inverse based on the data that was +available when a query started (by computing it from the merged data). +Now, we compute it as data is added and maintain it. + +Note: This does mean that if a `with_key` runs with the same grouping as +an existing table, any keys it adds will be added to the in-memory key +hash inverse. ([`ae98c60`](https://github.com/kaskada-ai/kaskada/commit/ae98c608fd6dccfc988f4479fbc37de4c68e89e3)) + +* feat: add since window for structs (#638) + +Also makes `collect` continuous. ([`720524b`](https://github.com/kaskada-ai/kaskada/commit/720524bd9b2c62cb2e171ba306e43def200ab705)) + +* feat: Unlatched spread for all the things (#637) ([`a547f9c`](https://github.com/kaskada-ai/kaskada/commit/a547f9c6a1974fa6ef32b1003e6a28de19d972f1)) + +* feat: Add the trailing window (#636) + +This also does the package reworking so that `kt.windows.Since` is the +since window (rather than `kt.SinceWindow`).
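As a rough sketch of the reworked window API this entry describes (hypothetical illustration: the `CsvString` argument names, the `sum` aggregation, and `kt.windows.Trailing` are assumptions, not confirmed signatures):

```python
import kaskada as kt
from datetime import timedelta

# Hypothetical source; the column names are invented for this sketch.
purchases = kt.sources.CsvString(
    "time,user,amount\n2023-01-01T00:00:00,u1,10\n",
    time_column="time",
    key_column="user",
)

amount = purchases.col("amount")

# Windows now live under `kt.windows` instead of top-level classes.
since_total = amount.sum(window=kt.windows.Since(amount > 100))
trailing_total = amount.sum(window=kt.windows.Trailing(timedelta(minutes=10)))
```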
([`bb7c5be`](https://github.com/kaskada-ai/kaskada/commit/bb7c5be99d323b2020c96ab0b0d933cdb7ff9697)) + +* feat: Support boolean literals (#634) ([`6279fe3`](https://github.com/kaskada-ai/kaskada/commit/6279fe3c0e97ac80d98c46b84cd278391421018e)) + +* feat: hack collect lists by wrapping in struct (#633) + +Hacks `collect(list<t>) -> list<list<t>>` support by wrapping it in a +struct and then field-reffing. Only works from the python builder. ([`bd43166`](https://github.com/kaskada-ai/kaskada/commit/bd43166fd97d98c7be7ca3b18e600d7211ef2fa6)) + +* feat: shift sparrow functions (#620) + +Adds shift-to, shift-until, and shift-by sparrow functions + +Linted via +`nox -s fix-lint` ([`e2f6b97`](https://github.com/kaskada-ai/kaskada/commit/e2f6b97cd9670b6345e2c25bdf3cb3c0b3d6e91f)) + +* feat: Support field-ref on lists of structs (#630) + +A similar strategy should work for maps, but I didn't implement it yet. + +--------- + +Co-authored-by: Jordan Frazier <jordan.frazier@datastax.com> ([`be88967`](https://github.com/kaskada-ai/kaskada/commit/be8896734768c1d399348f84c1475c796eb2cf04)) + +* feat: Add a flatten operation (#627) + +I haven't been able to test this yet since we don't have an easy way to +generate a list of lists. + +This also adds type inference for nested collections and tests that. + +--------- + +Co-authored-by: Jordan Frazier <jordan.frazier@datastax.com> ([`49aa050`](https://github.com/kaskada-ai/kaskada/commit/49aa0500dd1867c956fb1a1a3183c92d8571c148)) + +* feat: Support arithmetic chains (#628) ([`037b7be`](https://github.com/kaskada-ai/kaskada/commit/037b7be5a4049cdfe4dbb7b1c2e95eed6340aa6b)) + +* feat: implement lag with collect (#625) + +This also changes the behavior of lag to `IGNORE NULLS` (likely similar +to the options in this lag: +https://learn.microsoft.com/en-us/sql/t-sql/functions/lag-transact-sql?view=sql-server-ver16). ([`3578b20`](https://github.com/kaskada-ai/kaskada/commit/3578b20ec5a79072d1d68c175e82a0271ef04066)) + +* feat: Introduce `col` method for field access (#624) ([`e64959e`](https://github.com/kaskada-ai/kaskada/commit/e64959e39e6a26fc2d42f032d94c37dc939b15d4)) + +* feat: Asynchronous materializations (#617) ([`14eed90`](https://github.com/kaskada-ai/kaskada/commit/14eed904d2d673c9bc20d6ceca0a6d12d9d81c99)) + +* feat: Support casting timestreams (#623) + +To make things more uniform, it was helpful to have an explicit class +method for creating a literal timestream. ([`7c33a43`](https://github.com/kaskada-ai/kaskada/commit/7c33a43b9d3d0540d4f75807f6372e7bc9b22f2e)) + +* feat: ignore null values in collect (#616) + +Ignoring null values in `collect` is more consistent with the existing +aggregations. It also allows us to implement lag with collect. + +It also retains the capability to include nulls (albeit in a hacky manner), +by doing the following (since the record is never null) +``` +{ v: value } | collect() +``` ([`d6c69e0`](https://github.com/kaskada-ai/kaskada/commit/d6c69e0d525854af157580b54d56deccd3e9d951)) + +* feat: Add basic time arithmetic (#615) ([`4b627e9`](https://github.com/kaskada-ai/kaskada/commit/4b627e9f64e78046ed2d2973bcb425870326101d)) + +* feat: Introduce InMemoryBatches for managing data (#611) + +This supports the existing "add a batch to a table", but also allows +subscribing to a stream of additions. + +As part of this, moved the existing (old) merge code to a separate crate, +so it could be used by the compiler (which is currently responsible for +the data context).
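A minimal Python sketch of the add/subscribe shape described above (the real `InMemoryBatches` is a Rust type; every name here is illustrative):

```python
import asyncio
from typing import Any

class InMemoryBatches:
    """Holds batches added so far and fans new ones out to subscribers."""

    def __init__(self) -> None:
        self._batches: list[Any] = []
        self._subscribers: list[asyncio.Queue] = []

    def add(self, batch: Any) -> None:
        # Record the batch and notify every live subscription.
        self._batches.append(batch)
        for queue in self._subscribers:
            queue.put_nowait(batch)

    def subscribe(self) -> asyncio.Queue:
        # Replay the batches added so far, then receive future additions.
        queue: asyncio.Queue = asyncio.Queue()
        for batch in self._batches:
            queue.put_nowait(batch)
        self._subscribers.append(queue)
        return queue
```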
([`25725b4`](https://github.com/kaskada-ai/kaskada/commit/25725b4468f623a7f6c23594a07d4d6256efad99)) + +* feat: more basic functions in sparrow py (#612) + +adds: + +`filter` (when) +`time_of` +`lag` +`with_key` +`lookup` +`if_` and `null_if` +`len_` or `length` ([`eff5666`](https://github.com/kaskada-ai/kaskada/commit/eff56664f5bfa4eee9984d7dc44551fc040cc22d)) + +* feat: Async iteration through Rust (#606) ([`53b6905`](https://github.com/kaskada-ai/kaskada/commit/53b69059a250aef5fd6330365d3f806745f2938d)) + +* feat: add min parameter to collect (#607) + +Adds the `min` parameter to collect, which allows a user to specify a +minimum list length before a non-null value is produced from +`collect()`. Also reorders parameters to make the python builder arg +pattern a little cleaner. + +Unfortunately, we still aren't able to use `collect` as a replacement +for `lag`, given the current behavior. `Lag` does not count `null` as a +valid value, while `collect` does. See the ignored unit test for an +example of differences. ([`08f5e9c`](https://github.com/kaskada-ai/kaskada/commit/08f5e9c69da488e2127943df7d1b51e970c2f334)) + +* feat: add list_len function (#609) + +This is the same implementation for all types, so we can move to a +generic `length` evaluator when we move to the `sparrow-expressions` +package. ([`8e2b748`](https://github.com/kaskada-ai/kaskada/commit/8e2b7484a325b6718b0c4e5a4f2ea1f3b04fb416)) + +* feat: First pass at doc site for Python (#605) + +This uses Sphinx and the + +[sphinx-book-theme](https://sphinx-book-theme.readthedocs.io/en/stable/) +to create organized documentation of the available methods for creating +and executing timestreams. + +Sphinx makes it easy to include syntax-highlighted examples from the +documentation, as well as embedding the **rendered** notebook output of +cells (embedding them in documentation we write) and even including +notebooks as part of the documentation. + +With a few tweaks, we can make every notebook cell executable as well. +See for instance +https://jupyterbook.org/en/stable/interactive/thebe.html. ([`d31cb43`](https://github.com/kaskada-ai/kaskada/commit/d31cb437dfdd0b0941747e3ba70ad8911bbc4d2e)) + +* feat: Allow running non-record columns (#604) ([`98dec21`](https://github.com/kaskada-ai/kaskada/commit/98dec2112a8436b6a78a4b1a46c1a82e8d90f497)) + +* feat: support `DataType::Struct` in collect function (#602) + +Supports `DataType::Struct` in the collect function. ([`f427918`](https://github.com/kaskada-ai/kaskada/commit/f427918be316f95d9a92fc1960e3675b504bc2b6)) + +* feat: Rename `Expr` to `Timestream`; stub docs (#603) + +Main change is renaming `Expr` to `Timestream`. + +Other changes include: +- Adding methods (and tests) for basic math and comparison operators. +- Getting `nox -s docs` working for some rudimentary documentation +(WIP). +- Fleshing out docs for existing methods. ([`2bb67c1`](https://github.com/kaskada-ai/kaskada/commit/2bb67c17c15e823a6b6aac633dbf99e1a4fdaf08)) + +* feat: Introduce the Result class (#601) + +This allows multiple ways of interacting with the results of a query. + +Also change the `show` method (which printed to stdout) to `preview`, +which returns a DataFrame "preview" of the results. This should play +more nicely with testing as well as printing / visualizing as the user +wishes.
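A hypothetical usage of the `preview` method described above (the `CsvString` arguments and the row-limit parameter name are illustrative assumptions, not confirmed signatures):

```python
import kaskada as kt

data = "time,user,amount\n2023-01-01T00:00:00,u1,10\n2023-01-02T00:00:00,u1,15\n"
purchases = kt.sources.CsvString(data, time_column="time", key_column="user")

# `preview` returns a pandas DataFrame instead of printing to stdout,
# so callers can assert on it in tests or render it however they like.
df = purchases.col("amount").sum().preview(limit=10)
print(df)
```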
([`962c15d`](https://github.com/kaskada-ai/kaskada/commit/962c15d1639a8bd6ae4528cd3df0778a2f35b02a)) + +* feat: More work on Python API (#598) + +- Introduce options for limits, and the method `show()` for displaying +the results of a query. Will use `ipython.display()` if available, or +print the Pandas dataframe to stdout otherwise. +- Added pydoc for collect +- Fixed some issues in the pytest for adding data (specifically around +how the `_time` column round-tripped through the JSON in the golden +test). +- Updated some of the modules to allow `import sparrow_py as kt` and +using `kt.sources.CsvSource`. The trick was to make sure the root module +(`sparrow_py` currently) re-exports the submodule (`sources`). ([`d6bf2b2`](https://github.com/kaskada-ai/kaskada/commit/d6bf2b210a2cd7da7db82dbbbcf5cd1f0e47d227)) + +* feat: sparrow py collect function (#595) + +support the `collect` function in sparrow-py + +--------- + +Co-authored-by: Ben Chambers <35960+bjchambers@users.noreply.github.com> ([`ed026ae`](https://github.com/kaskada-ai/kaskada/commit/ed026ae1098f2fe5a04ed8769458eac42eb074aa)) + +* feat: Expose execution control from Rust to Python (#596) + +This is a building block for allowing Python code to expose batches as +generators, async generators, etc. ([`5be52a0`](https://github.com/kaskada-ai/kaskada/commit/5be52a00358c95cec9cf3bb76d5a29635edd25ea)) + +* feat: add first/last list non-windowed aggregation support (#592) ([`2984bb0`](https://github.com/kaskada-ai/kaskada/commit/2984bb003cb4e5c9f0c6feca387410807ab49b36)) + +* feat: Allow adding more data to a table (#593) + +This allows calling `add` on the ArrowSource. We could expose similar +functionality for CSV. ([`894adbb`](https://github.com/kaskada-ai/kaskada/commit/894adbbd42a5ee6f19d5727b47c2f0d75b2ed51f)) + +* feat: Support aggregation functions in Python (#586) + +Introduces Python classes for representing windows and flattens them +into arguments for the corresponding aggregation functions. + +This also changes the AST to DFG logic so that it only tries to flatten +if the original AST is available. This does mean that ticks won't be +recreated, meaning they won't have the proper domain. + +Both of these likely need to be extended to cover other aggregations. ([`1d3724c`](https://github.com/kaskada-ai/kaskada/commit/1d3724c009a66bb05d04c4b5135681828f3f1734)) + +* feat: support since windows in collect aggregations (#583) + +Also fixes bug where since windows were...never used. The since window +impl is likely more efficient than the more complex two-stacks, so it's +likely better to swap. ([`f1f079a`](https://github.com/kaskada-ai/kaskada/commit/f1f079afd17c8767ddfefe5d15f6f9086d892b5a)) + +* feat: Add record field manipulation (#582) + +Select fields, remove fields (inverted select), extend, and record +creation. ([`71e8345`](https://github.com/kaskada-ai/kaskada/commit/71e834561df80795d7446b14867bb527ab35c130)) + +* feat: OpenAI Example Notebook (#573) + +# OpenAI Example Notebook + +Adds an OpenAI Example Notebook + +## Next Steps +* Reactions? ([`53bb096`](https://github.com/kaskada-ai/kaskada/commit/53bb09647caf454baff804ba230554023ff34973)) + +* feat: Execute Python queries (#575) + +This removes the ability to inspect the prepared data from the Python +FFI. Instead, the Python FFI now has an `execute` method allowing you to +inspect the results of an expression, including the bare table. + +This changes the execute methods a bit to have a simpler API to +interface with.
+ +This relies on pushing "in-memory" data into the `DataContext`, which is +very brittle and hacky. For example, it currently only supports adding +data once, and requires that the schemas line up. ([`a81973e`](https://github.com/kaskada-ai/kaskada/commit/a81973e5881346e8e917a77eeec17c50fb43a5b0)) + +* feat: Allow writing to a channel (#572) + +This introduces a `Destination` enum and converts the existing protobuf +to that before execution. This allows providing an output channel for +direct output (e.g., to Python via PyArrow). + +This also removes the deprecated Redis output. ([`62143e7`](https://github.com/kaskada-ai/kaskada/commit/62143e757955f5f4acf0c82b782a96ad706832e6)) + +* feat: collect to list (non-windowed) (primitive/strings/booleans) (#569) + +Adds the `collect` function for non-windowed, primitive/string/boolean +types. + +This doesn't support `LargeUtf8` because the other string aggregations +that use the `create_typed_evaluator` macro don't support generic offset +size, so that will come in a follow-up. + +--------- + +Co-authored-by: Kevin J Nguyen <kevin.nguyen@datastax.com> ([`0f33802`](https://github.com/kaskada-ai/kaskada/commit/0f33802e4a04da216ff791ae7b784d9943204f67)) + +* feat: Prepare data in rust (#568) + +This breaks some of the Rust code into a new `sparrow-session` crate +providing a place for implementing things that aren't specific to the +Python FFI. + +This also introduces some pieces for error conversions, with the hope +that we can grow to a point where we log errors on the rust side but +surface relevant details to the Python side. ([`f58306e`](https://github.com/kaskada-ai/kaskada/commit/f58306ef93914712265e9a47014f053ee6b4056e)) + +* feat: Add a Table class on the Rust side (#566) + +This moves data management into the Rust codebase. It only counts the +number of record batches that have been added. + +Next steps after this PR: + +1. Preparing (and possibly concatenating) the record batches in-memory. +2. Executing queries over the in-memory batches. ([`3b236da`](https://github.com/kaskada-ai/kaskada/commit/3b236da47135abd3f49dbdd17da4b31cd9c94fa5)) + +* feat: sparrow_py initial table management (#565) + +# Sparrow_py Initial Table Management + +## Changes +* Adds an `add` method to `Table`. +* Adds a `get_data` method to `Table`. +* Adds schema assertion requirements to load data +* Adds naive schema validation at table creation time ([`43d5daa`](https://github.com/kaskada-ai/kaskada/commit/43d5daa7524a005b0e425e3b029e15c4218aeda3)) + +* feat: Connect Python builder to Rust (#561) + +This creates DFG nodes and checks their types. + +Some extra features: + +- Uses the pyarrow types of the expression to limit some overloads. + In the case of `__getattr__` this prevents mistakes leading easily to + infinite recursion. In the case of `expr[expr]` this lets us use the + correct methods. +- Allows using python literals (`str`, `int` and `float`) as arguments + to expressions. +- Renames `ffi` module to `_ffi` to indicate it is private. +- Add some tests for error cases ([`4483e13`](https://github.com/kaskada-ai/kaskada/commit/4483e13126ccdf4296e4d7b4c8d7567ef6c4dbd4)) + +* feat: `index` for DataType::List (#562) + +Adds type inference support for lists and adds just the `index` +function.
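A rough plain-Python sketch of the `index` semantics (an illustration of the intended behavior, not the engine's Rust evaluator):

```python
from typing import Optional

def index(i: int, lists: list[Optional[list]]) -> list:
    """Element `i` of each list, or None when the list is null or too short."""
    out = []
    for xs in lists:
        if xs is not None and 0 <= i < len(xs):
            out.append(xs[i])
        else:
            out.append(None)
    return out

assert index(1, [[1, 2], [3], None]) == [2, None, None]
```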
+ +Further additions: https://github.com/kaskada-ai/kaskada/issues/494 ([`82adac4`](https://github.com/kaskada-ai/kaskada/commit/82adac4a4f452def5efdbfcf53a2b4e676b7e5e9)) + +* feat: Prototype calling Python UDFs from rust (#559) ([`44ef3f9`](https://github.com/kaskada-ai/kaskada/commit/44ef3f947f0949c678e487d11c39ae242aa0637f)) + +* feat: add prototype python query builder (#553) + +This adds a new top-level crate `sparrow-py`. It is intentionally +outside of `crates` since it needs to be in a separate workspace to +properly compile the Python code. + +This supports basic builders, implements most of the math operators +directly on expressions, and supports a `pipe` function similar to +Pandas. + +In Python 3.11 the exceptions are printed with locations highlighted and +the type information of each argument shown. ([`39f85f3`](https://github.com/kaskada-ai/kaskada/commit/39f85f3f22e2323713cc92dc35bd6d93a4a94fff)) + +### Fix + +* fix: .gitignore and add svg (#675) ([`bf2ef0d`](https://github.com/kaskada-ai/kaskada/commit/bf2ef0d87759895bdc8b4488bfd8f9fc09e4b4eb)) + +* fix: mypy again (#657) ([`50a8196`](https://github.com/kaskada-ai/kaskada/commit/50a81966a083d61e432a0be1a17a0cc93d947d9c)) + +* fix: allow null strings in CSVs (#646) ([`ae9ebaf`](https://github.com/kaskada-ai/kaskada/commit/ae9ebaffeb4a1e361e645a65afff3640f30b0481)) + +* fix: docs-build in noxfile (#613) ([`32540ed`](https://github.com/kaskada-ai/kaskada/commit/32540edb70fccf81fe9eb6f02101079fcc1e8250)) + +* fix: Aggregations in sparrow-py (#590) + +Specifically, the previous fix straightened out some signatures, but +didn't actually use the internal (DFG) signatures for the builder. ([`57c28f1`](https://github.com/kaskada-ai/kaskada/commit/57c28f12b3070e3b900ea2f5e7953ab5cf147a8c)) + +### Test + +* test: Verify pylist sources drop unneeded columns (#650) ([`67e5a58`](https://github.com/kaskada-ai/kaskada/commit/67e5a589917448e24988f15cfa627e7a1a6745dc)) + +* test: Introduce a golden test helper for Python (#589) ([`add60b9`](https://github.com/kaskada-ai/kaskada/commit/add60b9586f6c936b2a216406c619f7cc5d7172a)) + +### Unknown + +* bug: add test for else with records (#677) + +Debugging showed that else with records does appear to work. Added a test for future +regression verification. ([`b84cf12`](https://github.com/kaskada-ai/kaskada/commit/b84cf127dcd86da783f72e358830f55a9f014ac9)) + +* fix lint (#667) ([`27d7cd8`](https://github.com/kaskada-ai/kaskada/commit/27d7cd836502bacd6787f5e0df2b293e8972f34c)) + +* ref: remove signature modes (#670) + +Signature modes are not necessary. They used to be necessary because we +had a handful of varying signatures for aggregations to flatten window +arguments, but that has since been removed. ([`76149b0`](https://github.com/kaskada-ai/kaskada/commit/76149b0fc0f175fbceb13f88086b2e5e417eb5c8)) + +* ref: move sparrow_plan into sparrow_instructions (#666) + +This moves sparrow_plan into sparrow_instructions. + +This will allow us to avoid a cyclic dependency when creating the trait +for user-defined functions. ([`f6f597a`](https://github.com/kaskada-ai/kaskada/commit/f6f597abaef52c7cbbfc1fc6de6cf6dcd49afc19)) + +* bug: fix is_null to use not_ instead of neg (#663) ([`c58b1dd`](https://github.com/kaskada-ai/kaskada/commit/c58b1dde49f1b0004f339d8b80442a8c74893959)) + +* lint: Fix lint, mypy, etc.
(#654) ([`30e2dd4`](https://github.com/kaskada-ai/kaskada/commit/30e2dd4c6240121d30d67245d37894611e0d9ecf)) + +* updated slackbot script (#631) + +* minor updates to notebook version, left the `le = +preprocessing.LabelEncoder()` in there. +* lots of updates to the script version, including: +* swapping the labelEncoder for a json map lookup, which converts user +numbers in the model training set to Kaskada.AI slack userIds +* lots of `print()` debug statements that will be helpful to get this +going. we can remove them pre-demo. + * getting tokens/api-keys from the env + +also added the json data for the output lookup. + +changes are based on this script: +[examples/slackbot/run.py](https://github.com/kaskada-ai/kaskada/compare/main...esp/try_script#diff-2067ff038f2b64f5ce70205cd7274f93552230391a57fc6ea05cc2ff05ffe8f5) +which was working successfully yesterday, when sending single messages +from slack to completion. ([`568465f`](https://github.com/kaskada-ai/kaskada/commit/568465ff3f8d70ae709877c3598d1de0a527cc4c)) + +* Break the bot out into a script, fix a couple bits (#626) ([`90d3d9b`](https://github.com/kaskada-ai/kaskada/commit/90d3d9bc783b92407a30cf2135b34a98fc4e6c91)) + +* Update notebook per Eric's prep steps (#614) + +Co-authored-by: Eric Pinzur <2641606+epinzur@users.noreply.github.com> ([`50a27eb`](https://github.com/kaskada-ai/kaskada/commit/50a27eb0c9580c2d7c5068fea4fd5beb2a72f6c3)) + +* docs update code sample around creating/using session (#550) ([`6624cfa`](https://github.com/kaskada-ai/kaskada/commit/6624cfa97f10aef8f7d65b41069cdd2cb4dc36f9)) + +* ref: Introduce a class for golden tests (#608) + +This makes things a lot clearer than using the argument to determine how +the golden file should be produced. + +Change `json` to `jsonl` to reflect that it is line-delimited. ([`4ba4936`](https://github.com/kaskada-ai/kaskada/commit/4ba4936a549f34b97d648b70631a3d03580b0438)) + +* Update notebook to reflect what's currently possible (#591) + +This includes commented-out "desired state" lines - what's shown +*should* work with sparrow_py currently. ([`6db60f0`](https://github.com/kaskada-ai/kaskada/commit/6db60f04f159097bc49a97367cc9c39022b8b3e4)) + +* ref: Use a dataclass for execution options (#600) + +This reduces some duplication, in that we don't need to have the FFI +define a Python-compatible copy of the Rust execution options. Instead +we have it read straight from the documented dataclass. ([`39b07f9`](https://github.com/kaskada-ai/kaskada/commit/39b07f9ffadb702f7bdb0d5b417625508be14086)) + +* lint: Fix misc lint problems (#599) ([`3266ee4`](https://github.com/kaskada-ai/kaskada/commit/3266ee4acecf0cf505c131f8f8a031a7c3d977ef)) + +* added timestreams placeholder project (#597) + +project has been created on PyPI already: +https://pypi.org/project/timestreams/ ([`b64122e`](https://github.com/kaskada-ai/kaskada/commit/b64122e35150f2a0324a3b6604310533999bc9a6)) + +* bug: correctly type-check user-facing aggregation signatures (#587) + +and removes unnecessary split in `inst.rs` signatures. ([`0dcb5e5`](https://github.com/kaskada-ai/kaskada/commit/0dcb5e581b5a9002bfa8972062e1e43cc35a8a23)) + +* release: update python and sparrow version (#585) + +Based on current draft release: +https://github.com/kaskada-ai/kaskada/releases ([`1c4d568`](https://github.com/kaskada-ai/kaskada/commit/1c4d5689c65a5f61d3483ece43bea3be0fa64fb1)) + +* ref: Cleanup sources in Python (#581) + +Separate the different kinds of sources, so we have an `ArrowSource` for +PyArrow and Pandas-based data.
Default to taking an initial batch in the +constructor, so the schema can be inferred. ([`b54d20a`](https://github.com/kaskada-ai/kaskada/commit/b54d20abbb5b11342fcc2b423c89f20811c24499)) + +* ref: Switch to a global session (#579) ([`09fff6b`](https://github.com/kaskada-ai/kaskada/commit/09fff6ba1a061427586da13163e7056ad22c7906)) + +* removed redis destination (#542) + +we haven't supported this for a long time, let's remove it from the +codebase ([`786d566`](https://github.com/kaskada-ai/kaskada/commit/786d566a1fb61ef08339e861619fdb5e56637ed8)) + +* bug: fix default schema in inference for map/list (#577) + +Fixes the default schema for the `map` and `list` types in inference. +This isn't a good solution, as there's no guarantee that Arrow doesn't +change this behavior in the future. Ideally, we can figure out how to +plumb the user map type information through inference. But, it's +difficult because functions that produce `map` or `list` types need an +arbitrary naming/nullability that we define. + +Closes #576 ([`d5960e7`](https://github.com/kaskada-ai/kaskada/commit/d5960e753dcfb58d4bd78639c91ca1eb843b61b4)) + +* fix wren panic on failed compilation (#571) + +Added test reproducing the issue described in #551. Found that PR #448 +probably introduced the bug that causes this issue. Updated the code to +fix the problem. ([`f16e0c6`](https://github.com/kaskada-ai/kaskada/commit/f16e0c68eb88fbe2b44b6466e1beef78f7825862)) + +* Convert raw data to Parquet, implement as much as possible with existing tooling. (#567) ([`887c4ee`](https://github.com/kaskada-ai/kaskada/commit/887c4ee5cb716c89f7d41c022a15d2723e7530b7)) + +* bug: fix non-generic conversions in string spread (#563) + +Fixes spots in string spread that weren't generic ([`2f45a40`](https://github.com/kaskada-ai/kaskada/commit/2f45a401ea51d549a52f294d9a9160b9b799cea3)) + +* python client download specific engine release (#564) ([`ab5a4dd`](https://github.com/kaskada-ai/kaskada/commit/ab5a4dde5c72a11820068431b93cc163175b8445)) + +* Stub out notebook for Slackbot demo (#560) + +Co-authored-by: Eric Pinzur <epinzur@gmail.com> ([`cd66b84`](https://github.com/kaskada-ai/kaskada/commit/cd66b84023d69110f09bca7e083eefbcd89af316)) + +* ref: Expose a QueryBuilder (#558) + +This is pretty ugly -- it has to create a lot of fake locations and +create enums (like `ExprOp`) just to call into the existing AST -> DFG +compilation. We should definitely clean this up if we continue down the +path of using builders. ([`01b0d6c`](https://github.com/kaskada-ai/kaskada/commit/01b0d6c5084f978cadb78353a7bce1749c08cc38)) + + +## v0.5.1 (2023-07-26) + +### Build + +* build: More version upgrades (#557) + +The change to Lalrpop is supposed to provide faster builds. It also +seems to produce slightly better error messages. + +The changes to the toml files seem to be a formatting issue (how it +orders the fields). ([`4ebb48a`](https://github.com/kaskada-ai/kaskada/commit/4ebb48adce3182905dbbf417da37500230983873)) + +* build: update tonic versions (#555) ([`3c73199`](https://github.com/kaskada-ai/kaskada/commit/3c731994cadece3544a4478585dfc938b3acf9b9)) + +* build: use statically linked libssl (#546) + +We build our binaries on ubuntu:20 which uses libssl1.1 (through our +direct dependency of `reqwest`). Libssl is dynamically linked, and as +systems move to ubuntu:22, which has libssl3.0, our binary breaks. An +example is Google's Colab. + +We also build multiplatform Linux docker images using `cross-rs`.
+`cross-rs` currently builds for ubuntu:20 on their `master` branch and +there is currently an open PR +(https://github.com/cross-rs/cross/pull/973) to move to ubuntu:22. + +The proposed solution with this PR is to enable the feature on `reqwest` +that builds the libssl crate (and the libssl library) statically in the +sparrow binaries rather than relying on dynamic linking to pick the +system's libssl library. + +We are *not* moving our CI to use ubuntu:22 until we have a +solution for building multiarch images on ubuntu:22 (through `cross-rs` +or otherwise). Otherwise, if we move to ubuntu:22 today without any +other changes, our docker image will break on linux on arm (anyone on a +Mac with an M chip running a docker container). + + + + +1. sparrow depends on `reqwest` which brings in libssl. +2. added an `if` condition on the release CI that we missed + + +I verified that the binaries no longer link libssl dynamically: + +Before: +``` +❯ ldd sparrow-main + linux-vdso.so.1 (0x00007fff56643000) + libssl.so.3 => /lib/x86_64-linux-gnu/libssl.so.3 (0x00007f3387ee8000) + libcrypto.so.3 => /lib/x86_64-linux-gnu/libcrypto.so.3 (0x00007f3383e00000) + libstdc++.so.6 => /lib/x86_64-linux-gnu/libstdc++.so.6 (0x00007f3383a00000) + libgcc_s.so.1 => /lib/x86_64-linux-gnu/libgcc_s.so.1 (0x00007f3387ec8000) + libm.so.6 => /lib/x86_64-linux-gnu/libm.so.6 (0x00007f3384321000) + libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007f3383c1f000) + /lib64/ld-linux-x86-64.so.2 (0x00007f3387fa4000) + +``` + + +After + +``` +❯ ldd sparrow-main + linux-vdso.so.1 (0x00007ffc7d3f2000) + libstdc++.so.6 => /lib/x86_64-linux-gnu/libstdc++.so.6 (0x00007f3989800000) + libgcc_s.so.1 => /lib/x86_64-linux-gnu/libgcc_s.so.1 (0x00007f398d9aa000) + libm.so.6 => /lib/x86_64-linux-gnu/libm.so.6 (0x00007f3989b21000) + libc.so.6 => /lib/x86_64-linux-gnu/libc.so.6 (0x00007f398961f000) + /lib64/ld-linux-x86-64.so.2 (0x00007f398d9dd000) +``` + + +The final `release` binary size increases by 2MB + +Before: + +``` +❯ ls -lh sparrow-main +-rwxr-xr-x 2 therapon therapon 85M Jul 24 13:09 sparrow-main + +``` + +After + +``` +❯ ls -lh sparrow-main +-rwxr-xr-x 2 therapon therapon 87M Jul 24 13:06 sparrow-main + +``` ([`fafdf58`](https://github.com/kaskada-ai/kaskada/commit/fafdf581c6e9fe210f9e0c969df5b3ac99467cba)) + +* build: Upgrade Arrow, Parquet and Object Store (#491) ([`0591468`](https://github.com/kaskada-ai/kaskada/commit/0591468c415a1dbe96b52970ad9d0b7468512416)) + +* build: Upgrade Arrow, Parquet and Object Store ([`a27b61e`](https://github.com/kaskada-ai/kaskada/commit/a27b61e83412e99a7408daf3472e4ff11d87de00)) + +* build: Update to arrow 40 (#419) + +Recent versions of Arrow make it much easier to create custom +operations -- things like `array.unary(...)` and working with +buffers more easily. + +This upgrade was a bit painful since we were going through so +many versions. The biggest changes were the low-level +operations to work with bits -- we can just iterate directly +now and no longer need our helper. + +Fields also got wrapped in an `Arc` to avoid inefficient deep +copies. Filed #417 and #418 to take advantage of some of these +new capabilities. ([`2258aa2`](https://github.com/kaskada-ai/kaskada/commit/2258aa21db1cc81acadb94120919beb82c3057dc)) + +* build: Update to arrow 40 + +Recent versions of Arrow make it much easier to create custom +operations -- things like `array.unary(...)` and working with +buffers more easily. + +This upgrade was a bit painful since we were going through so +many versions.
The biggest changes were the low-level +operations to work with bits -- we can just iterate directly +now and no longer need our helper. + +Fields also got wrapped in an `Arc` to avoid inefficient deep +copies. Filed #417 and #418 to take advantage of some of these +new capabilities. ([`271a488`](https://github.com/kaskada-ai/kaskada/commit/271a488b35e82df83c8aee859b4772f5b0714355)) + +* build: Don't ignore `main` (#399) + +This is a dangerous name to ignore -- basically any Java/Scala code will +be in a directory like `src/main` which will be entirely ignored by this +pattern. + +We should either ignore `/main` (if we really want to ignore it only at +the root level) or better yet change the configuration of `wren` to not +compile to `main`. ([`83c30a1`](https://github.com/kaskada-ai/kaskada/commit/83c30a1017a748b22af4595b0f3b67743b1b192c)) + +* build: Don't ignore `main` + +This is a dangerous name to ignore -- basically any Java/Scala code will +be in a directory like `src/main` which will be entirely ignored by this +pattern. + +We should either ignore `/main` (if we really want to ignore it only at +the root level) or better yet change the configuration of `wren` to not +compile to `main`. ([`f2310e5`](https://github.com/kaskada-ai/kaskada/commit/f2310e5ccd6a21b10e1fd396fabce1a37fe35456)) + +* build: update dependencies ([`193b8aa`](https://github.com/kaskada-ai/kaskada/commit/193b8aa014e7447691fbcc117fc0239024cd7992)) + +* build: try fixing the github action syntax ([`fe9ea38`](https://github.com/kaskada-ai/kaskada/commit/fe9ea380673fbadb9e7dad9aaa48941bd39bc1a2)) + +* build: add permissions to push to ghcr.io ([`bc50f04`](https://github.com/kaskada-ai/kaskada/commit/bc50f04a4cf268231a84c561d13ffbb81b9a5644)) + +* build: add package permissions to release ([`b17ddf9`](https://github.com/kaskada-ai/kaskada/commit/b17ddf989790cc19f4c8019b8985c702483e153d)) + +* build: leave the assets where they are ([`339d361`](https://github.com/kaskada-ai/kaskada/commit/339d361ea93869e278927cb84a4a62b791433cd8)) + +* build: fix dockerfile ([`2940cdf`](https://github.com/kaskada-ai/kaskada/commit/2940cdfe30f920e41273218af5d587c96132ae3a)) + +* build: variable references ([`19ed6d2`](https://github.com/kaskada-ai/kaskada/commit/19ed6d2e42fac3505b9279c0aaeb27bae253c12d)) + +* build: work on release drafter ([`76ade93`](https://github.com/kaskada-ai/kaskada/commit/76ade9362e933e8c4ed9b769fa7cfe01dac02abf)) + +* build: fix tag computation ([`3fac672`](https://github.com/kaskada-ai/kaskada/commit/3fac672add3d01f84c4bb8fb51c8d58473950530)) + +* build: set commitish to the sha ([`748ce18`](https://github.com/kaskada-ai/kaskada/commit/748ce188381827504907aaaf9f46e099262a794a)) + +* build: specify the tag correctly ([`c2a29c5`](https://github.com/kaskada-ai/kaskada/commit/c2a29c53bf28a89747a0bf33228ccc0bfe08afbc)) + +* build: determine current date ([`9a3e1e1`](https://github.com/kaskada-ai/kaskada/commit/9a3e1e1b58f5d159d6862cc8cccaef1c6a0e074c)) + +* build: attempt to fix docker build ([`8f23b05`](https://github.com/kaskada-ai/kaskada/commit/8f23b05af261b3e67345daa10fb6e5c804dec3d8)) + +* build: fix relative paths ([`8f0e85e`](https://github.com/kaskada-ai/kaskada/commit/8f0e85ef325b50449bfd2a5734ff627d5853840a)) + +* build: change cache-dependency-path ([`1db250a`](https://github.com/kaskada-ai/kaskada/commit/1db250a70c4bb50da33687f9c523168ed9fca335)) + +* build: remove invalid value ([`e6d1e64`](https://github.com/kaskada-ai/kaskada/commit/e6d1e6425befadf4c6b22031ae3dce9eaf81098f)) + +* build: Fix release
drafter ([`1c30e04`](https://github.com/kaskada-ai/kaskada/commit/1c30e04c31de430bec37fb768a9670e76cb0b765)) + +### Ci + +* ci: kaskada only logs for s3 tests (#510) ([`e0287a6`](https://github.com/kaskada-ai/kaskada/commit/e0287a6c4a196f5d04cd21809fb045c8f6efb5ff)) + +* ci: Only retrieve kaskada logs (#480) + +The other service (Pulsar) is spammy and makes using the logs harder. ([`6deb30d`](https://github.com/kaskada-ai/kaskada/commit/6deb30d7bf9173477fcaa5c99d687d1203192358)) + +* ci: Only retrieve kaskada logs + +The other service (Pulsar) is spammy and makes using the logs harder. ([`f3f05e9`](https://github.com/kaskada-ai/kaskada/commit/f3f05e9a841ee6afc3fac5c80b6ec3b0dabb9270)) + +* ci(docs): run antora in this repo (#414) + +Closes: #397 + +Implements the steps as described in #397 ([`9d7aebf`](https://github.com/kaskada-ai/kaskada/commit/9d7aebf6c0da2a57d2bb53cd11e033849ad30e98)) + +* ci(docs): run antora in this repo ([`9d4c35c`](https://github.com/kaskada-ai/kaskada/commit/9d4c35ce6334d642ebcf5522597c659b6a14caad)) + +* ci(cla check): workaround for making cla check required (#401) + +The CLA bot hangs when using its action as required *and* merge queue is +enabled (https://github.com/finos/cla-bot/issues/210) + +As a workaround, adding a workflow to check for the `cla-signed` label +that is added by the bot once the CLA requirement is met. + +This workflow does *nothing* (skips) in the merge queue since there are +no labels or the ability to alter the merge queue PR being created +behind the scenes. + +This approach does have "back doors" so we may want to revisit +automating the CLA checks with another solution. + +As far as I can tell, we cannot wrap the cla-bot action itself since it +is programmatically generated by the app. ([`17aeaa2`](https://github.com/kaskada-ai/kaskada/commit/17aeaa269ea19272858e7471ee370f7911ed1d3b)) + +* ci(integration tests): testing condition for integration tests (#400) + +The skip condition for integration tests was using `true` instead of +`true`, causing integration tests to always run + +NOTE: this PR *will* build and run integration tests due to the changes +being in the workflow file.
([`ad684cc`](https://github.com/kaskada-ai/kaskada/commit/ad684cc06acb1edf13fad06911c1368860c0b63c)) + +* ci(release): login and push release images to DockerHub (#398) + +Adds steps in CI to + +* login to dockerhub using our system account and token +* tag images with the appropriate registry `kaskadaio/engine` and +`kaskadaio/jupyter` with `latest` and release version +* push to dockerhub + +NOTE: I have manually pushed the current latest release for both engine +and jupyter images + +* https://hub.docker.com/r/kaskadaio/jupyter/tags +* https://hub.docker.com/r/kaskadaio/engine/tags ([`bbeda20`](https://github.com/kaskada-ai/kaskada/commit/bbeda20d47c0e20854719360fe30684f844da3f0)) + +* ci(cla check): workaround for making cla check required ([`a7ec058`](https://github.com/kaskada-ai/kaskada/commit/a7ec0580373669ba35779e5c6e89db7e8fefb0e9)) + +* ci(integtests): fix skip condition ([`373f2c1`](https://github.com/kaskada-ai/kaskada/commit/373f2c1e1dc3fd9450e422affae29964e74e5856)) + +* ci: debugging docker run ([`349aff2`](https://github.com/kaskada-ai/kaskada/commit/349aff296fb005acc566d21deace02f78caf8f1a)) + +* ci: debugging docker run ([`b0fa49f`](https://github.com/kaskada-ai/kaskada/commit/b0fa49fb20390063df47f52149e72079a90c93c1)) + +* ci: debugging docker run ([`28a8510`](https://github.com/kaskada-ai/kaskada/commit/28a85101893dae44e013bd23fede6904906a9c5e)) + +* ci: debugging docker run ([`fd65080`](https://github.com/kaskada-ai/kaskada/commit/fd65080ebd604b21e3288fc566f6f167e4955973)) + +* ci: debugging docker run ([`46eaa47`](https://github.com/kaskada-ai/kaskada/commit/46eaa471bc8697613b2127b6c54b83c725a5df6a)) + +* ci: debugging docker run ([`5a41afa`](https://github.com/kaskada-ai/kaskada/commit/5a41afa7d4ccc66fd0c05304a8dbde2c68b0aa3d)) + +* ci: debugging docker run ([`e4b8ef6`](https://github.com/kaskada-ai/kaskada/commit/e4b8ef662f716e8d6baba1e099708e7fd0d78973)) + +* ci: debugging docker run ([`04f3c7a`](https://github.com/kaskada-ai/kaskada/commit/04f3c7acd41e5406c9e34607dfe71c799d7944d7)) + +* ci: debugging docker run ([`9a8538a`](https://github.com/kaskada-ai/kaskada/commit/9a8538af9f69f79b53239e82031ed3edfe5ca1a3)) + +* ci: set executable permission on api.test ([`6ef52e0`](https://github.com/kaskada-ai/kaskada/commit/6ef52e079da4bf5e477456e02b84a608895f831d)) + +* ci: add characters at start and end of docker tag ([`02da680`](https://github.com/kaskada-ai/kaskada/commit/02da6808c9a91639e0a5fa0c6a89f7d310a046d1)) + +* ci: add permissions to write to docker registry ([`3d9cc2d`](https://github.com/kaskada-ai/kaskada/commit/3d9cc2dff33fec14e0f1c91897fed2af31eba8b7)) + +* ci: print ci-bin folder contents ([`eca4ea6`](https://github.com/kaskada-ai/kaskada/commit/eca4ea6c4d834182503c78a1dd118d35cd9d5f2f)) + +* ci: print ci-bin folder contents ([`00ca6b9`](https://github.com/kaskada-ai/kaskada/commit/00ca6b9d6ba7186e772f5da9fd0e5b66359b4015)) + +* ci: move Dockerfile ([`e118d10`](https://github.com/kaskada-ai/kaskada/commit/e118d10e0094b685c653ad2b78c25b7c3406b0bf)) + +* ci: use ubuntu 20.04 for engine builds and release ([`ff64ca0`](https://github.com/kaskada-ai/kaskada/commit/ff64ca00ad69bcf4d1aa1c6338a63dc2ce7148fb)) + +* ci: add sources to docker build for docker file access ([`c7d8e6a`](https://github.com/kaskada-ai/kaskada/commit/c7d8e6a4a0ac1cc60eb7f222508e96316caca41f)) + +* ci: adds integration docker file ([`69f5bc0`](https://github.com/kaskada-ai/kaskada/commit/69f5bc0df97d2c7ff01f70d93fa422b681e78bda)) + +* ci: fix docker tag
([`18294cb`](https://github.com/kaskada-ai/kaskada/commit/18294cba36e5522c0ace752d1745699e8ed995b7)) + +* ci: fix docker tag ([`f8fb193`](https://github.com/kaskada-ai/kaskada/commit/f8fb19346edd22fd42e4ad0c7645c1c64c0702a2)) + +* ci: fix docker tag ([`50aeb9a`](https://github.com/kaskada-ai/kaskada/commit/50aeb9a005b0069561ec52c05b4de537f0e8bc02)) + +* ci: fix docker tag ([`75ffb8e`](https://github.com/kaskada-ai/kaskada/commit/75ffb8e7050d1467742701e9eb1a2dc5b4d23da3)) + +* ci: adds integration test CI workflow ([`953fe8a`](https://github.com/kaskada-ai/kaskada/commit/953fe8ab5d02786c1f273d79c6c36f18897988f1)) + +### Documentation + +* docs: Update Slice Filters Documentation (#519) + +Updates the documentation for the usage of the slice filtering. ([`0ca8805`](https://github.com/kaskada-ai/kaskada/commit/0ca88054e37fe7f072ad58dcb5a5fdf885982bc7)) + +* docs(beta): instructions for beta releases (#517) ([`fd03f39`](https://github.com/kaskada-ai/kaskada/commit/fd03f39ef7d3dd6a3d4860b90911c25ab98e084f)) + +* docs(getting-started): update queries (#425) + +* moves code into `examples` +* creates partial pages for fenl code that is reused for cli and +notebook +* we need to wrap the fenl code in order to add `%%fenl` for notebooks +(files prefixed with `nb`) and without `%%fenl` for cli (files prefixed +with `cli`) +* updated the more complex query to +1. Remove the query's dependency on the `PageViews` table since we do not +provide data for that table (see #411) +2. Deal with a bug (will file separate issue) when binding `hourly()` to +a name `cadence` ([`921194c`](https://github.com/kaskada-ai/kaskada/commit/921194ca2797c6bbe82d91118ea165f447070817)) + +* docs(getting-started): update queries ([`00335b3`](https://github.com/kaskada-ai/kaskada/commit/00335b34a09c0c0cf669388fd04128da00d7179e)) + +* docs(installation): note on permissions and pip (#406) + +Closes: #233 + +Adds information on `pip install kaskada` on hosted platforms that may +give permission errors. ([`834fd01`](https://github.com/kaskada-ai/kaskada/commit/834fd01653592bea1a1a30b6deaa70243f3d67e6)) + +* docs(installation): note on permissions and pip ([`c031791`](https://github.com/kaskada-ai/kaskada/commit/c0317916cb3a422ba1f7f7f1ce5d8cfb060e74d8)) + +* docs: Update timelines notebook (#288) + +- Use smaller time range so the graphs are easier to read +- Include commands to save to HTML and SVG +- Generate separate images for introducing timelines ([`0ace5ce`](https://github.com/kaskada-ai/kaskada/commit/0ace5ce1cbe023d57bfa3f11411f241f2520eb03)) + +* docs: update to include readthedocs for clients (#356) + +Closes #266. ([`b5ddb9e`](https://github.com/kaskada-ai/kaskada/commit/b5ddb9e6a3740ebaa16ad85a1201d12a664a6532)) + +* docs: Add GCP Configuration to Loading Data (#354) + +Adds loading data to the docs. ([`b9a4057`](https://github.com/kaskada-ai/kaskada/commit/b9a4057311d5f4d04861201faf8504f48e1f4825)) + +* docs: Initial input/output page (#339) + +Create an initial list of supported sources and sinks. + +We should probably include the object store destination here, but this +is meant as a starting place so that we have a convenient place to link +when saying things like "Kaskada supports a variety of sources and +sinks".
([`9a38563`](https://github.com/kaskada-ai/kaskada/commit/9a38563aa1af6597e096a28ca8c0a9880bba0514)) + +* docs: Initial input/output page ([`0620082`](https://github.com/kaskada-ai/kaskada/commit/06200820beb875659cd3d91193d74d6261f9044d)) + +* docs: update Kaskada CLI docs with results (#337) + +Updates the Kaskada CLI hello world with the results of running the +sample queries. ([`4887f86`](https://github.com/kaskada-ai/kaskada/commit/4887f86ee37141c63cdfdcac7669ec6bc52714cb)) + +* docs: execute command cli (#334) + +Updates the cli docs to run the local `./` to match the other usages of +the cli. ([`0018048`](https://github.com/kaskada-ai/kaskada/commit/0018048592cf07991beec7111773aefee36956ea)) + +* docs: Kaskada CLI optional PATH (#333) + +Updates the docs to make setting the CLI Path to optional. ([`a6e5115`](https://github.com/kaskada-ai/kaskada/commit/a6e511582d93af37ee8dfa613debfc6d340bef52)) + +* docs: extra ` in CLI onboarding (#332) + +Update docs to remove an extra `. ([`6a61292`](https://github.com/kaskada-ai/kaskada/commit/6a61292126eb7717bd988169509e317444cd6d5d)) + +* docs: cli echo path fix (#331) + +Updates the hello world cli to use the correct echo path command. ([`e91a0c7`](https://github.com/kaskada-ai/kaskada/commit/e91a0c7a11970c728625405be718f8ecf15945be)) + +* docs: update Kaskada CLI docs with results ([`5a0dce2`](https://github.com/kaskada-ai/kaskada/commit/5a0dce2c84b613d59bf14885e302a91f4e2cc3b2)) + +* docs: execute command cli ([`d412929`](https://github.com/kaskada-ai/kaskada/commit/d412929e8d77ccd0c5654c510b992daaaf97c258)) + +* docs: extra ` in CLI onboarding ([`7d758f8`](https://github.com/kaskada-ai/kaskada/commit/7d758f82607b554e796ccd078ca7a759ef18a852)) + +* docs: cli echo path fix ([`419d99e`](https://github.com/kaskada-ai/kaskada/commit/419d99eea89c2fdd8e0f84bbe7c54183e17617e5)) + +* docs: Update timelines notebook + +- Use smaller time range so the graphs are easier to read +- Include commands to save to HTML and SVG +- Generate separate images for introducing timelines ([`c5730b6`](https://github.com/kaskada-ai/kaskada/commit/c5730b66b041d01ec6a15860658ac66a34772906)) + +* docs: Further refine the timelines notebook + +- [x] Allow per-timeline data +- [x] Handle categorical data (and fix image) +- [x] Render continuous arrow to the right + +Next steps +- [ ] Use per-timeline data to generate remaining images +- [ ] Attempt to use different colors/marks for different entity types + +Deferred + +- (Not currently possible) Show vertical cursor across plots (highlight + continuous value at the same time) +- (Not currently possible) Show all points on a vertical line for + continuous timelines. ([`e721300`](https://github.com/kaskada-ai/kaskada/commit/e72130023b09f326eca9ccbffe658a38b98e200d)) + +* docs: Add example based on timeline presentation + +This currently has a partial implementation of timeline visualization +(with some rough edges identified in the notebook) to be worked on. + +Once complete this notebook can be used to generate the SVGs (and an +embeddable version) of the timeline examples. 
([`766dd4d`](https://github.com/kaskada-ai/kaskada/commit/766dd4d03e39cc1477cf1321c0692f4bb1ca7685)) + +* docs: add a getting started page ([`924e7df`](https://github.com/kaskada-ai/kaskada/commit/924e7df51bddd377982599b43dcbb5e1d9e5d198)) + +* docs: update readme ([`cbf1851`](https://github.com/kaskada-ai/kaskada/commit/cbf18516c95b17f83880e30d1321d79da12b30b9)) + +* docs: change source code language to fenl ([`d1f4ada`](https://github.com/kaskada-ai/kaskada/commit/d1f4adacbbe43b37f689b4400a9015b131e9640a)) + +* docs: fixes titles and updates quick start ([`5c334ab`](https://github.com/kaskada-ai/kaskada/commit/5c334ab2973551968679bf2758d4c289bde42bc9)) + +* docs: comment out non-working Colab tutorials ([`bbbc935`](https://github.com/kaskada-ai/kaskada/commit/bbbc935bcdf4d9d0c61e67ac71217bceb50cb17d)) + +* docs: kaskada in jupyter quick setup ([`49d5af8`](https://github.com/kaskada-ai/kaskada/commit/49d5af8edec33f20c85ee7ef9f9038222a323917)) + +### Feature + +* feat: Use new hashing (#545) + +This uses the new Hasher-based implementation, which should be faster +(avoids re-allocating the buffer) and supports structs. + +This closes #211. +This closes #462. +This closes #525. + +--------- + +Co-authored-by: Eric Pinzur <epinzur@gmail.com> +Co-authored-by: Jordan Frazier <jordan.frazier@datastax.com> ([`dd2136d`](https://github.com/kaskada-ai/kaskada/commit/dd2136dd9affc40cbf03243dc25f118b385df4f6)) + +* feat: Initial partitioned execution (#528) + +This introduces the key components of partitioned execution. + +- `sparrow-scheduler` provides functionality for managing the separate +pipelines within the query plan and morsel-driven parallelism. It +manages a thread-pool of workers pinned to specific CPUs, pulling tasks +from local queues. +- `sparrow-transforms` will provide implementations of the "transforms" +(project, select, etc.) and a pipeline for executing the transforms. +- `sparrow-execution` will pull everything together to provide +partitioned execution. + +This is part of #409. ([`71bfc2c`](https://github.com/kaskada-ai/kaskada/commit/71bfc2ca4ae554436ec42dd3a3f478d307a21eea)) + +* feat: add unlatched map spread and largeutf8 spreads (#529) + +Adds unlatched map spread to the merge operation, which allows us to +have non-stateful `DataType::Map` types in `merge`. + +Snuck in support for `LargeUtf8`. ([`74ed812`](https://github.com/kaskada-ai/kaskada/commit/74ed812ad667e050e72c77f93c63b0c63702c31a)) + +* feat: support first and last non-windowed aggregations over DataType::Map (#540) + +Support non-windowed aggregations `first` and `last` over map types. ([`a57eb1b`](https://github.com/kaskada-ai/kaskada/commit/a57eb1b4f84eb661c039e4917b981f2f378bbb4a)) + +* feat: Change catalog to produce asciidoc (#537) ([`968dbc6`](https://github.com/kaskada-ai/kaskada/commit/968dbc6be50e2ca6b40e3630e4dafba09bfee1b6)) + +* feat: Add serde support for arrays (#543) + +Also adds tests for `ArrayRef` and `MapArray` serialization. ([`58c7351`](https://github.com/kaskada-ai/kaskada/commit/58c735179fb414d084e9ae108efbf8a87ac208fb)) + +* feat: support more types for get evaluator for maps (#532) + +Uses get index kernels to simplify evaluator types. Supports +`Boolean/String/Primitive` types for `get` on maps.
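A plain-Python sketch of the `get`-on-maps semantics described above (illustrative only, not the Rust evaluator): look up a key in each map value, yielding null where the map is null or the key is absent.

```python
from typing import Any, Optional

def get(key: Any, maps: list[Optional[dict]]) -> list:
    """Value for `key` in each map, or None when the map is null or lacks the key."""
    return [m.get(key) if m is not None else None for m in maps]

assert get("a", [{"a": 1}, {"b": 2}, None]) == [1, None, None]
```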
+ +Part of https://github.com/kaskada-ai/kaskada/issues/494 ([`1037a91`](https://github.com/kaskada-ai/kaskada/commit/1037a9100e0dd32c1683135ee863b8ec8ec8f418)) + +* feat: add `get` function for maps (for string -> primitive) (#500) + +Adds the `get` function for maps for string and large_string to +primitive types. + +Further additions can support more types. See +https://github.com/kaskada-ai/kaskada/issues/494 for task list + +Testing: +* verified that the sample map data (with a largeutf8) produces results +as expected. ([`c700afe`](https://github.com/kaskada-ai/kaskada/commit/c700afe114b6bac4d2bd92659a9438f395f36223)) + +* feat: Use object store for rocksdb and debugging (#503) + +This uses `object_store` for uploading/downloading snapshots and the +query plan and flight record (debugging) information. + +This removes the `s3` module, the `S3Helper` and the direct dependencies +on AWS. + +This is part of #465. + +--------- + +Co-authored-by: Eric Pinzur <epinzur@gmail.com> +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`5728ab8`](https://github.com/kaskada-ai/kaskada/commit/5728ab88f8e4ce31eca2011241f8027a2e32f386)) + +* feat: Use object_store to write files (#492) + +Also introduces paged file output. + +This does a hacky CSV write by buffering a batch at a time. + +This is related to #486. +This is part of #465. +This is part of #466. ([`b0c86bf`](https://github.com/kaskada-ai/kaskada/commit/b0c86bfa243ea825241614a210d22eefa37abf73)) + +* feat: Use object_store to read prepare inputs (#495) + +For CSV, this uses object store to copy the file to local disk, since we +need to infer the schema from the entire file, and the CSV inference +doesn't support a streaming mode. + +For Parquet, this uses streaming, async reads. + +This is part of #465. + +This also introduced `ObjectMetaExt` to make it possible to get a hash +of the etag. ([`e614bea`](https://github.com/kaskada-ai/kaskada/commit/e614bea8e15e47abe62cf272cd6b6f9e2bbe86f6)) + +* feat: support type inference for map<k,v> (#482) + +Adds type inference for map types. Does not yet implement any functions +for maps. ([`9c70288`](https://github.com/kaskada-ai/kaskada/commit/9c7028841c8fe7fb7d93ce9053092d11fb89fb8c)) + +* feat: add map type inference + +this adds support for map<k,v> type parsing in the grammar and +inference ([`95ed8e5`](https://github.com/kaskada-ai/kaskada/commit/95ed8e5f4dfb469235eb873e282bd2431081cec8)) + +* feat: Use object_store to read prepare inputs + +For CSV, this uses object store to copy the file to +local disk, since we need to infer the schema from the +entire file, and the CSV inference doesn't support a +streaming mode. + +For Parquet, this uses streaming, async reads. + +This is part of #465. + +This also introduced `ObjectMetaExt` to make it possible to get a +hash of the etag. ([`079cc5c`](https://github.com/kaskada-ai/kaskada/commit/079cc5cd5aadd01dab70f1aa9b7e4dc3c7fb5510)) + +* feat: Use object_store to write files + +Also introduces paged file output. + +This does a hacky CSV write by buffering a batch at a time. + +This is related to #486. +This is part of #465. +This is part of #466. ([`7b81dc4`](https://github.com/kaskada-ai/kaskada/commit/7b81dc477f93154fafb0abf30c2e17ffb2831c5a)) + +* feat: Kafka Backed Sources API Proposal (#458) + +# API Proposal + +Adds Kafka backed sources to the API. Most of the configurations are +based on the Rust +[client](https://docs.rs/kafka/latest/kafka/consumer/struct.Builder.html). 
([`02144bd`](https://github.com/kaskada-ai/kaskada/commit/02144bd2c7b6dc0d43ece474302a3f68581f80cc)) + +* feat: Use metadata for retrieving the schema (#479) + +This is part of #465. + +This also simplifies the `ObjectStore` API to hide the `ObjectStoreKey`. ([`250ab5b`](https://github.com/kaskada-ai/kaskada/commit/250ab5bafce7e00033e310190cffa53a353b2d7a)) + +* feat: expression evaluation for new plans (#463) + +This introduces the basic evaluator mechanisms for the new physical +plans. As part of this, some aspects of the physical expression +representation changed. Specifically, allowing a function call to have +zero or more literal arguments allowed every expression to be "simple". + +This also copied most of the existing evaluators into the new API. This +was done by copying to avoid overhead involved in refactoring. The plan +is to get the new runtime path based on these working (and passing the +same tests) and then delete the old path. + +This doesn't copy over some of the scalar functions (comparisons) or all +of the time functions. Those can be ported over in the future once the +runtime is in place. + +This also doesn't copy over any stateful operations (aggregations) yet +since aggregation may need to use a different approach to interface with +the physical plans. ([`70b4fc9`](https://github.com/kaskada-ai/kaskada/commit/70b4fc90eb5401d9225f1826d5db1b987226f951)) + +* feat: Use metadata for retrieving the schema + +This is part of #465. + +This also simplifies the `ObjectStore` API to hide the `ObjectStoreKey`. ([`7c2bec5`](https://github.com/kaskada-ai/kaskada/commit/7c2bec573ec0626a4ac435e9cb18fade8e4b2f0d)) + +* feat: Prepare directly to object stores (#475) + +This is part of #465 and serves as the first example of using +`object_store` and the async Parquet writer to write directly to object +stores. ([`8dbc359`](https://github.com/kaskada-ai/kaskada/commit/8dbc35934faee4ea746bb9f6581495981e089033)) + +* feat: Use object_store for metadata (#476) + +This is part of #465. + +This uses `object_store` to retrieve the metadata files. Additionally, +it uses a `select_all` so that all metadata files are read concurrently. ([`74e6299`](https://github.com/kaskada-ai/kaskada/commit/74e629970449b38dc46a152eb7879279c122c44f)) + +* feat: Use object_store for metadata + +This is part of #465. + +This uses `object_store` to retrieve the metadata files. Additionally, +it uses a `select_all` so that all metadata files are read concurrently. ([`9220e7d`](https://github.com/kaskada-ai/kaskada/commit/9220e7dec994f0f190c7a19928440c007fa939ad)) + +* feat: Prepare directly to object stores + +This is part of #465 and serves as the first example of using +`object_store` and the async Parquet writer to write directly to +object stores. ([`0665136`](https://github.com/kaskada-ai/kaskada/commit/0665136ca963a1db54dfe10820524081ce5c7d2f)) + +* feat: Read directly during compute (#471) + +This is part of #465 -- specifically, reading directly using +`object_store` during compute. ([`7858a62`](https://github.com/kaskada-ai/kaskada/commit/7858a62bc26c4ffd2451336d6d4dee82bd393fab)) + +* feat: Read directly during compute + +This is part of #465 -- specifically, reading directly using +`object_store` during compute. ([`604226d`](https://github.com/kaskada-ai/kaskada/commit/604226d25530db90a2f81c8324443c0f08a95d26)) + +* feat: use explicit type variable in signatures (#457) + +Step 1 of many to support generic types for collections. + +Adds type variable struct to signatures. 
+
+Next step supports nested generics, like `Map<K,V>` ([`8e86e21`](https://github.com/kaskada-ai/kaskada/commit/8e86e21043ce9af7cfb2e7cbba74314d2d440539))
+
+* feat: expression evaluation for new plans
+
+This introduces the basic evaluator mechanisms for the new physical
+plans. As part of this, some aspects of the physical expression
+representation changed. Specifically, allowing a function call to
+have zero or more literal arguments allowed every expression to be
+"simple".
+
+This also copied most of the existing evaluators into the new API.
+This was done by copying to avoid overhead involved in refactoring.
+The plan is to get the new runtime path based on these working (and
+passing the same tests) and then delete the old path.
+
+This doesn't copy over some of the scalar functions (comparisons) or
+all of the time functions. Those can be ported over in the future once
+the runtime is in place.
+
+This also doesn't copy over any stateful operations (aggregations) yet
+since aggregation may need to use a different approach to interface with
+the physical plans. ([`7128668`](https://github.com/kaskada-ai/kaskada/commit/712866859d0f19bb7e5f7fa2233f3b7d6e51f0fd))
+
+* feat: Introduce a Batch wrapper
+
+Currently, we use a few different batches in different places during
+execution. This doesn't yet replace any of those; however, it introduces
+a single batch representation that supports the multiple uses. This is
+based on the batch used in a prototype that plumbs a single kind of
+batch everywhere in execution.
+
+Future PRs will update existing code and/or pull in prototype code using
+the Batch, with the goal of replacing the different batch
+representations. ([`5387ea8`](https://github.com/kaskada-ai/kaskada/commit/5387ea8ad0634831094c485c09eddefc9b5d5ebc))
+
+* feat: materialization API sparrow impl (#388)
+
+Implements the `Start`, `Get`, and `Stop` API for materializations.
+
+This will allow users to interact with materializations through Wren
+(and subsequently through python in jupyter notebooks). ([`0061770`](https://github.com/kaskada-ai/kaskada/commit/0061770915668f56b5a40be44fd1ca412ce6e59f))
+
+* feat: introduce pipeline scheduler (#413)
+
+This is part of #409.
+
+Introduces `Pipeline` information to the physical plan. This indicates
+which steps are part of a linear sequence, and should (ideally) be
+executed together.
+
+Also implements a pipeline "scheduler" to determine the pipeline for
+each step, in a new `sparrow-backend` crate. As the physical plan is
+built up, the code should go in this "compiler backend" package, which
+can own optimization and conversion of logical plans to physical plans. ([`6f88d31`](https://github.com/kaskada-ai/kaskada/commit/6f88d318ca26b4c93e7822983a628788a28b676b))
+
+* feat: introduce pipeline scheduler
+
+This is part of #409.
+
+Introduces `Pipeline` information to the physical plan. This indicates
+which steps are part of a linear sequence, and should (ideally) be
+executed together.
+
+Also implements a pipeline "scheduler" to determine the pipeline for
+each step, in a new `sparrow-backend` crate. As the physical plan is
+built up, the code should go in this "compiler backend" package, which
+can own optimization and conversion of logical plans to physical plans. ([`07f69e0`](https://github.com/kaskada-ai/kaskada/commit/07f69e06b33d89b605c1a7c4a94800196942b8d4))
+
+* feat: introducing physical plans (#410)
+
+This is part of #409. 
([`165a426`](https://github.com/kaskada-ai/kaskada/commit/165a4261b50357db38d9c3af1815dfca63349c0e))
+
+* feat: introducing physical plans
+
+This is part of #409. ([`de8721f`](https://github.com/kaskada-ai/kaskada/commit/de8721f96a7e21b35463df5d30b4724df49736ad))
+
+* feat: Remote Session Improvements (python) (#395)
+
+# Remote Session Improvements
+
+Updates the python client to support a remote session and adds the docs
+to configure it. ([`a76693c`](https://github.com/kaskada-ai/kaskada/commit/a76693cad49a27b5cf5affaad050fbc40f2ce63e))
+
+* feat: real-time streaming poc from cli (#366)
+
+This adds a materialize command that allows creating a long-lived
+process from the cli.
+
+This is not stateful yet - control-c will close the stream and won't
+start back up where you stopped. Since we currently `ack` all messages
+on read, any new materializations will only read new data. ([`78c10f2`](https://github.com/kaskada-ai/kaskada/commit/78c10f2ed654c04be9bae97082c12d58129d42f9))
+
+* feat: Kaskada Health Checks + Auto Recovery for Python (#254 / #267) (#373)
+
+# Description
+
+Adds health checks to the Python client and improves overall code
+syntax.
+
+## Changes
+
+* Adds `grpcio-health-checking` to the Python `pyproject.toml`.
+* Updates the pylint max line to 200 (easier to read).
+* Replaces `api_utils.run_subprocess` with a `KaskadaLocalService` and a
+`SubprocessFactory`.
+* Replaces `api_utils.check_socket` with an actual health check.
+* Updates subprocess runs to synchronously terminate with the `stop()`
+method.
+* Adds a keep-alive watcher to automatically recover from failure.
+* Updates the connection retry logic from the session to the client.
+* Creates a `LocalSession` object to wrap around the Kaskada services to
+enable restarting.
+* Updates client docs and organization.
+* Adds a `HealthCheckClient`.
+* Adds a `HealthCheckServicer`.
+* Adds a `HealthCheckWatcher`. ([`a91566e`](https://github.com/kaskada-ai/kaskada/commit/a91566e2622d18cad99b8506e633c462b61c6e28))
+
+* feat: Kaskada Health Checks + Auto Recovery for Python (#254 / #267) ([`0b146c6`](https://github.com/kaskada-ai/kaskada/commit/0b146c6b4a91303195c48060669eab60c5697205))
+
+* feat: add fenlmagic extension loaded message (#305) (#340)
+
+Closes #305.
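+
+For context, the extension is loaded in a notebook cell roughly like
+this (a minimal sketch; the message in the screenshot below is printed
+once the extension loads):
+
+```
+# In a Jupyter/IPython cell:
+%load_ext fenlmagic
+```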
+
+<img width="464" alt="Screenshot 2023-05-08 at 5 45 43 PM"
+src="https://user-images.githubusercontent.com/15032894/236953606-48c02e42-c773-4bf9-8e50-d4960d7850eb.png"> ([`91b0f15`](https://github.com/kaskada-ai/kaskada/commit/91b0f152ac3e017b154a7ee70a3cda82e4f4830f))
+
+* feat: Re-enable the QFR tool ([`fd92859`](https://github.com/kaskada-ai/kaskada/commit/fd928593cac35b4c919815cf9fa1618a8109bb6b))
+
+* feat: Re-introduce generalized Flight Recorder
+
+Remaining work:
+- [ ] Fix or remove broken sparrow code
+- [ ] Update chrome tracing to use new protobufs
+- [ ] Support for "top-level" metrics (not associated with an activity)
+- [ ] Add a few more tests ([`98b6c53`](https://github.com/kaskada-ai/kaskada/commit/98b6c533a4c0150b78863df9774496e4c358d95e))
+
+### Fix
+
+* fix: clippy warnings in collection_tests (#496) ([`119c15b`](https://github.com/kaskada-ai/kaskada/commit/119c15b1deb7eec7d1f4e18eb96790e470ce9366))
+
+* fix: clippy warnings in collection_tests ([`b94e3ae`](https://github.com/kaskada-ai/kaskada/commit/b94e3ae2399f9281a96db3713bdfe1fb33109874))
+
+* fix: feature flags in main (#452) ([`8241504`](https://github.com/kaskada-ai/kaskada/commit/824150422db6c16f4914f28eae23b156af35eded))
+
+* fix: feature flags in main ([`d8efb0d`](https://github.com/kaskada-ai/kaskada/commit/d8efb0d8154a2bd6ec09ee8244c5805df377caee))
+
+* fix: typo in error status ([`681e558`](https://github.com/kaskada-ai/kaskada/commit/681e55895693a877d106c26f4bb81b5844a29f1a))
+
+### Test
+
+* test: add ignored duplicate row test (#456)
+
+https://github.com/kaskada-ai/kaskada/issues/524 ([`085c3a0`](https://github.com/kaskada-ai/kaskada/commit/085c3a0dc36fd9a2e3c4d092931d4591f259a8a1))
+
+* test: Use gproto matchers for schemas (#460)
+
+This uses a `gproto.Equal` matcher for the primitive schemas rather than
+just relying on them being equal.
+
+It turns out that protocol buffers have some "hidden" fields and won't
+always be actually equal (according to gomega's `Equal` matcher).
+
+For instance -- printing a message before the assertion definitely
+causes it to fail. It seems like other cases may also cause it to fail
+(flakiness we were seeing after some other changes).
+
+We should probably switch to using this anywhere we were matching
+protobufs. ([`cce761c`](https://github.com/kaskada-ai/kaskada/commit/cce761cceb3300ea6acfb66181ab31fbb0eab736))
+
+* test: Use gproto matchers for schemas ([`978f38d`](https://github.com/kaskada-ai/kaskada/commit/978f38d857a720cd87a9302898dbeb5873e0d79c))
+
+* test: adds missing method impls for test materialization clients ([`0aa0874`](https://github.com/kaskada-ai/kaskada/commit/0aa0874c591f1bdfaca6e23d85fe21b780f83362))
+
+### Unknown
+
+* engine release 0.10.0 (#554) ([`c186ae9`](https://github.com/kaskada-ai/kaskada/commit/c186ae924d22268247571bdf33e143c66e7579a7))
+
+* added docs for installing via helm (#530)
+
+moves `installing` to its own sub-menu. ([`8c9a02b`](https://github.com/kaskada-ai/kaskada/commit/8c9a02b05410eb505925d8e16c2a89f888df15a9))
+
+* moved helm chart to new repo (#539)
+
+did this to use the standard helm-chart releasing tools.
+
+you can now do:
+```
+helm repo add kaskada https://kaskada.io/helm-charts/
+helm repo update
+helm search repo kaskada
+```
+
+and get back:
+```
+NAME                    CHART VERSION   APP VERSION   DESCRIPTION
+kaskada/kaskada-canary  0.0.2                         Kaskada is a query engine for event-based (time...
+```
+
+Amazing! 
([`2cc43e3`](https://github.com/kaskada-ai/kaskada/commit/2cc43e3ea0707f727e530cbaefa9b6c8b7846096)) + +* return actual error instead of 'internal error' (#541) ([`74c050b`](https://github.com/kaskada-ai/kaskada/commit/74c050b0b1621f305da18a847e4df308bde657cc)) + +* Python 0.5.1 Version Bump (#531) + +Bump minor version for the Python release. ([`a387993`](https://github.com/kaskada-ai/kaskada/commit/a387993938915a94409a2f549833667c8abd5763)) + +* Entity Key Filter Integration Test (#520) (#521) + +Adds a test for the entity key filter ([`da9eac9`](https://github.com/kaskada-ai/kaskada/commit/da9eac982063cdf05a8924110ea4f43c13b3dd7a)) + +* draft: various helm chart improvements (#464) + +* fix running without object storage (confirmed via test in EKS) +* add access-key/secret-id support for AWS S3 access (confirmed via test +in EKS) +* progress on AWS IRSA s3 access +* we need to make a code change to wren (and maybe sparrow) before I can +confirm that IRSA access is working. See #527 + +fixes: #427 ([`48cef56`](https://github.com/kaskada-ai/kaskada/commit/48cef565292019ce275e3087b7964943c7ab33b3)) + +* updated versions for release (#523) ([`1421def`](https://github.com/kaskada-ai/kaskada/commit/1421def1cab6c563480550a65d8f23332c7b8613)) + +* engine beta release fixes (#516) + +* create [beta release on +GitHub](https://github.com/kaskada-ai/kaskada/releases/tag/engine%40v0.9.0-beta.1) +* pushed docker images for engine and jupyter for both GitHub and +DockerHub +* https://github.com/kaskada-ai/kaskada/pkgs/container/kaskada%2Fengine +* https://github.com/kaskada-ai/kaskada/pkgs/container/kaskada%2Fjupyter +* +https://hub.docker.com/layers/kaskadaio/engine/v0.9.0-beta.1/images/sha256-b5496497e718f557eade6a15e24435005cc56066a22146c8767179b2719809c6?context=explore +* +https://hub.docker.com/layers/kaskadaio/jupyter/v0.9.0-beta.1/images/sha256-8ccca38221d1b0e3a6dadac7ec785c7a5db0f0aa82a00c7b0bf29d71a776a43f?context=explore + +Will follow up with a small PR on documentation. ([`9fa69fc`](https://github.com/kaskada-ai/kaskada/commit/9fa69fc896bf6fced2ce56d7d170572f60c8d89e)) + +* Python client beta release (#515) + +[Resulting +release](https://github.com/kaskada-ai/kaskada/releases/tag/python%40v0.5.0-beta.1) ([`59a2096`](https://github.com/kaskada-ai/kaskada/commit/59a2096a217741ffb537549c78e860e9ec9d498c)) + +* skips pypi for beta release flow (#511) ([`810c255`](https://github.com/kaskada-ai/kaskada/commit/810c255b9dec618813ebcb75b5dbf81458f43169)) + +* adds beta release ci flow (#509) + +Adds a beta GitHub release flow. + +The beta release does not have any comments on the release page for now. ([`ffa05be`](https://github.com/kaskada-ai/kaskada/commit/ffa05bedc28aae5c08c53b13f302d7b7dd8b523b)) + +* fixed gcs file loading and query (#507) + +resolves #504 ([`b4649a8`](https://github.com/kaskada-ai/kaskada/commit/b4649a8f2d759d28da5f60ac2a759f5ac5dfd88e)) + +* Merge branch 'main' into fix_gcp ([`31dea43`](https://github.com/kaskada-ai/kaskada/commit/31dea43faf87d67b30b7fd25a1c1938e2f44d994)) + +* adds beta release ci flow ([`3955f43`](https://github.com/kaskada-ai/kaskada/commit/3955f43bf3bfe4873ad3679c653149b3fd162e27)) + +* lint: Run `clippy --fix` (#506) + +This fixes warnings appearing in the new version of Rust. 
([`b8317af`](https://github.com/kaskada-ai/kaskada/commit/b8317afde25412691823fec1c2bae01e9de6b267)) + +* fixed gcs file loading and query ([`2ef5dac`](https://github.com/kaskada-ai/kaskada/commit/2ef5dacaa4240f04413c2c9dc5efa4aec4faf15d)) + +* lint: Run `clippy --fix` + +This fixes warnings appearing in the new version of Rust. ([`95f823e`](https://github.com/kaskada-ai/kaskada/commit/95f823e2269534e80a5b044a3f531cdf74a7f169)) + +* ref: Cleanup object_store code a bit (#501) + +Move `upload` and `download` to the root `ObjectStoreRegistry`. + +Move `object_store_key` to a separate file and make that an +implementation detail. + +This is part of cleanup after most of #465. ([`b05855f`](https://github.com/kaskada-ai/kaskada/commit/b05855ff63a643d506fef1d80aa233db3234283d)) + +* ref: Cleanup object_store code a bit + +Move `upload` and `download` to the root `ObjectStoreRegistry`. + +Move `object_store_key` to a separate file and make that an +implementation detail. ([`6fe96c9`](https://github.com/kaskada-ai/kaskada/commit/6fe96c93a1de473042af3aed6477719e1544ad86)) + +* docs(install) updates instructions for installation (#498) + +* moves the commands into `install/install.sh` file +* updates installation instructions in our docs ([`f9fec6d`](https://github.com/kaskada-ai/kaskada/commit/f9fec6db236119040124990529e61402a42e1d36)) + +* docs(install) updates instructions for installation ([`a2b3548`](https://github.com/kaskada-ai/kaskada/commit/a2b3548d1daee2a63c297db768bc2d627d483206)) + +* Merge branch 'main' into object_store_compute_write ([`c719314`](https://github.com/kaskada-ai/kaskada/commit/c7193142ef9a4e3737de75b10e5ffeedda155556)) + +* ensure wren always sends paths with trailing slash ([`6bab1e5`](https://github.com/kaskada-ai/kaskada/commit/6bab1e5f4554d52c8c69b9cf8d2622126abe9a57)) + +* Merge branch 'main' into object_store_compute_write ([`88de931`](https://github.com/kaskada-ai/kaskada/commit/88de931e2df5b0d073baf3326dde1f86dcb609f4)) + +* Merge branch 'main' into object_store_compute_write ([`39b4bd9`](https://github.com/kaskada-ai/kaskada/commit/39b4bd9ff036ae127d986524b9121e1fbe268b8d)) + +* comments ([`12c93dc`](https://github.com/kaskada-ai/kaskada/commit/12c93dc4edf3febcc00b997304162cf7281c6595)) + +* Merge branch 'main' into object_store_during_prepare ([`ad9f769`](https://github.com/kaskada-ai/kaskada/commit/ad9f7697ee247ad766e0d0f9d5f301b65a21f507)) + +* typo ([`c455879`](https://github.com/kaskada-ai/kaskada/commit/c4558792bbfe914abd6db0f4af2bdc3fc51141b9)) + +* comments ([`2b2eae0`](https://github.com/kaskada-ai/kaskada/commit/2b2eae00164119d1eacd99a0b1860b835ff3cfa5)) + +* comments ([`1e3ae85`](https://github.com/kaskada-ai/kaskada/commit/1e3ae8535db5b4a3e8d67a4a2e87518992f5122e)) + +* review comments ([`7aa169e`](https://github.com/kaskada-ai/kaskada/commit/7aa169ede4b41186a53300c63047bacfda8ca91b)) + +* fmt ([`f294af8`](https://github.com/kaskada-ai/kaskada/commit/f294af8c75fa2ace0241ef6170344be2ff78af56)) + +* Update fenl type naming ([`8827f00`](https://github.com/kaskada-ai/kaskada/commit/8827f0048ec81dbae9a23dc44f26c8beef7e9203)) + +* Merge branch 'main' into object_store_compute_write ([`82a1e92`](https://github.com/kaskada-ai/kaskada/commit/82a1e9271b1b1503001410ee90aab9b769888468)) + +* #484 - Python Client Object Store Configs (#489) + +# Python Client Object Store Configs + +Adds the ability to configure the object store for the manager by +updating the local session object. 
+
+## Example Usage (Local)
+```python
+from kaskada.api.local_session.object_store import ObjectStoreType
+from kaskada.api.local_session.object_store import ObjectStoreConfig
+
+my_store_type = ObjectStoreType.LOCAL
+my_config = ObjectStoreConfig(my_store_type, "/Users/kevin.nguyen/Github/kaskada/examples2")
+
+from kaskada.api.session import LocalBuilder
+session = LocalBuilder()\
+    .object_store(my_config)\
+    .build()
+```
+
+## Example Usage (S3)
+```python
+my_store_type = ObjectStoreType.S3
+my_config = ObjectStoreConfig(\
+    my_store_type, \
+    "/my-prefix-path", \
+    bucket = "example-bucket")
+``` ([`ad103fe`](https://github.com/kaskada-ai/kaskada/commit/ad103fe90a95878529b70df9b64336a7dc91471f))
+
+* improved reliability of integration tests (#490)
+
+resolves #474
+
+progress on #473, #459 ([`f626c90`](https://github.com/kaskada-ai/kaskada/commit/f626c906f9eae5336d4364cc35845d655b36d102))
+
+* formatting ([`f650285`](https://github.com/kaskada-ai/kaskada/commit/f650285a9f790df69bfa186ddb02706c94cdd978))
+
+* Merge branch 'main' into integration/flakiness ([`8fe82b5`](https://github.com/kaskada-ai/kaskada/commit/8fe82b5974103249063e6d88c20f0b0c35c04fa5))
+
+* code review comments ([`b05b76b`](https://github.com/kaskada-ai/kaskada/commit/b05b76bb269d2a9aa36c2d0a0aa9ab512c85b668))
+
+* Merge branch 'main' into feature/484-python-object-store ([`ddff6c2`](https://github.com/kaskada-ai/kaskada/commit/ddff6c28084a16a0cb8fabf3cb202faefb356a0f))
+
+* improved reliability of integration tests ([`4acddbe`](https://github.com/kaskada-ai/kaskada/commit/4acddbe04e2bddb4ea577927fc69586e35cd9c5f))
+
+* Merge branch 'main' into feature/484-python-object-store ([`8a35563`](https://github.com/kaskada-ai/kaskada/commit/8a35563eeef5498657e41de5b5d9647fa6062973))
+
+* code review comments ([`c433a7b`](https://github.com/kaskada-ai/kaskada/commit/c433a7b5bc4b9b62c8e72cf93c696d9046d4b80d))
+
+* fixes wren unit test flakiness (#488)
+
+resolves #477
+
+Ran `go run github.com/onsi/ginkgo/v2/ginkgo --until-it-fails ./...`
+until more than 500 consecutive runs passed without failure ([`4e513e0`](https://github.com/kaskada-ai/kaskada/commit/4e513e0d01c8338b478e4fa663e2ad550ab7858c))
+
+* update api tests ([`6f99f94`](https://github.com/kaskada-ai/kaskada/commit/6f99f940ef6596d754842d649d9d8d1416119271))
+
+* update test matcher ([`75c599f`](https://github.com/kaskada-ai/kaskada/commit/75c599f1ec774d1b30d573887557359ac3db5ffa))
+
+* added changes ([`0346d20`](https://github.com/kaskada-ai/kaskada/commit/0346d207938c39d5c3105e98da5afbe201d9381b))
+
+* clippy ([`7872dc1`](https://github.com/kaskada-ai/kaskada/commit/7872dc13f00c5c1caa7e3857e5b4d8e7c5c19561))
+
+* fixed wren unit test flakiness ([`19fb355`](https://github.com/kaskada-ai/kaskada/commit/19fb355a27bf5e572b038b0147db85dda1f6368f))
+
+* changed default behavior of file-load to leave files in-place (#472)
+
+New option `copy_to_filesystem` added to revert to the old behavior.
+@kevinjnguyen we will need to add this option to the python client too. ([`ad3c5ec`](https://github.com/kaskada-ai/kaskada/commit/ad3c5ecca76c418612838e2e85bd05be9c0a39a2))
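+
+As a sketch of what that client-side toggle might look like
+(hypothetical -- at this point `copy_to_filesystem` exists only as an
+engine-side option, and the client call shown here is approximate):
+
+```python
+# Hypothetical sketch: the `copy_to_filesystem` keyword is not part of
+# the python client yet (see the note in the entry above).
+from kaskada import table
+
+table.load(
+    table_name="Purchase",
+    file="s3://my-bucket/purchases.parquet",
+    copy_to_filesystem=True,  # opt back into the old copy-locally behavior
+)
+```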
+
+* fixed checking status on object store item not owned by kaskada ([`73d27b6`](https://github.com/kaskada-ai/kaskada/commit/73d27b603f3c9ee186c4af7781afe8945ceb92d8))
+
+* fixed errors when files change or are removed ([`f5c4cad`](https://github.com/kaskada-ai/kaskada/commit/f5c4cad1b9c803cf6aa043fe064ad7fd851af686))
+
+* minor: cleanup comment in expressions (#487) ([`1fe19cc`](https://github.com/kaskada-ai/kaskada/commit/1fe19cc929990664a4bbba272c7f82ef9f9bc942))
+
+* add todo ([`377c074`](https://github.com/kaskada-ai/kaskada/commit/377c074059733febacb5324981658de71cda335d))
+
+* minor: cleanup comment in expressions ([`49dd6b0`](https://github.com/kaskada-ai/kaskada/commit/49dd6b07ecd6ac6e8fedb4505dcd055ffa19e485))
+
+* Merge branch 'main' into feature/kafka-api-proposal ([`92ef1a7`](https://github.com/kaskada-ai/kaskada/commit/92ef1a7e646ba862a91823c4eb812b3dca123dce))
+
+* #466 feat: Python Client Supports Multiple File Output (#478)
+
+Updates the Python client to support multiple files on output. Writing
+the tests for this proved difficult so I went with manually testing: a
+single result ✅ and multiple results ✅ ([`741bfe2`](https://github.com/kaskada-ai/kaskada/commit/741bfe283d3dea7aa1f47167bd598ea03bc4cd41))
+
+* Merge branch 'main' into feature/kafka-api-proposal ([`92de61d`](https://github.com/kaskada-ai/kaskada/commit/92de61d310fb9994bc5bd12c19ad451b9b6bed2a))
+
+* Merge branch 'main' into feature/466-multiple-file-python-support-concat ([`41f7555`](https://github.com/kaskada-ai/kaskada/commit/41f7555e303f3bed5bc16234d8aef5260b4fe2f6))
+
+* poetry format ([`f893e73`](https://github.com/kaskada-ai/kaskada/commit/f893e735bcde6e225a7152a93377c33283c807cb))
+
+* simplified docker-compose for ci, upgraded pulsar in ci (#483)
+
+I'm hoping upgrading to pulsar 3.0.0 in CI resolves some of our pulsar
+integration test flakiness. I've been using 3.0.0 locally for tests for
+a few weeks, and have not seen any flakiness in tests there. 
([`8e80987`](https://github.com/kaskada-ai/kaskada/commit/8e809870a8fc8172e9a3ef937e5e8c8e1281b8ab))
+
+* simplified docker-compose for ci, upgraded pulsar in ci ([`69a87f4`](https://github.com/kaskada-ai/kaskada/commit/69a87f427cc13dce5665410aea465ab74ba93880))
+
+* Update ci_engine.yml (#469) ([`17126fa`](https://github.com/kaskada-ai/kaskada/commit/17126fa2dafc25684e02d3f0b15a09985f0e1dc7))
+
+* comments ([`304c33b`](https://github.com/kaskada-ai/kaskada/commit/304c33b7234130e65d04603bd8890d19d63831bb))
+
+* expose key ([`43832a9`](https://github.com/kaskada-ai/kaskada/commit/43832a958b1310cd9d9a47b5282de9fcb9988ac8))
+
+* attempt to fix with lazy ([`6ace354`](https://github.com/kaskada-ai/kaskada/commit/6ace3545c325f79fc1464a032f5001cb6cc0a615))
+
+* update inventory ([`53b0098`](https://github.com/kaskada-ai/kaskada/commit/53b0098f5ee4089ace0fd4a2cc8be778a417e6ea))
+
+* Merge branch 'main' into ben/introduce-expressions ([`d2adee6`](https://github.com/kaskada-ai/kaskada/commit/d2adee698fc7abe656f33dbc0242914dc270f82b))
+
+* fix path ([`327bb30`](https://github.com/kaskada-ai/kaskada/commit/327bb30a488f5c45ff0af00560a75129f71d33fd))
+
+* Merge branch 'main' into feature/kafka-api-proposal ([`c6c2289`](https://github.com/kaskada-ai/kaskada/commit/c6c2289edef8e698ee3cd6538a5419554ca679c1))
+
+* code review comments ([`b2df549`](https://github.com/kaskada-ai/kaskada/commit/b2df54997799a25c0a1bd2282498e801c3555750))
+
+* added changes ([`cdbea22`](https://github.com/kaskada-ai/kaskada/commit/cdbea22fd3870892215bcf196479fbed5eb60b41))
+
+* wip ([`0bac9ef`](https://github.com/kaskada-ai/kaskada/commit/0bac9ef7988642ce7adce732751adb883eb186bc))
+
+* Merge branch 'main' into object_store_prepare_upload ([`f74afc5`](https://github.com/kaskada-ai/kaskada/commit/f74afc55b613d7ffb907cd7f6d807dc9b57599b2))
+
+* reversed new test logic ([`307e556`](https://github.com/kaskada-ai/kaskada/commit/307e556871e87ef34ba011fbbcb7bf1db492644b))
+
+* fixed issues running against object stores ([`0a3e68a`](https://github.com/kaskada-ai/kaskada/commit/0a3e68a6d98f02b930c6e4c60f3b880721f3b4da))
+
+* proto fmt ([`4964e65`](https://github.com/kaskada-ai/kaskada/commit/4964e6551687e197b5f18a462814cc8a40006470))
+
+* fix ([`c66c91e`](https://github.com/kaskada-ai/kaskada/commit/c66c91e92dd0ae22f31bf3c18dad835beb5d2602))
+
+* wip: debugging, removing dead code ([`ba22a28`](https://github.com/kaskada-ai/kaskada/commit/ba22a2839f547c7eb179088380e53b7fba5d6dab))
+
+* changed behavior to use files in-place by default ([`7dae536`](https://github.com/kaskada-ai/kaskada/commit/7dae536e68d4af54a811630cd0128c779c1315dd))
+
+* upload buffer size ([`5861225`](https://github.com/kaskada-ai/kaskada/commit/58612254820038a6b0f552d7a7aeaa649872f042))
+
+* comment: first/last ([`ffceddf`](https://github.com/kaskada-ai/kaskada/commit/ffceddfe6ff13fee6cb84294db3ccf2f59be0562))
+
+* code review comments ([`1bdb03e`](https://github.com/kaskada-ai/kaskada/commit/1bdb03ee782d6b0add142a8675f69ba6b2cf99ff))
+
+* add flake attempts to pulsar-to-pulsar ([`343606c`](https://github.com/kaskada-ai/kaskada/commit/343606ca05dfac1653501bab55aaa6e0eb8d5c90))
+
+* include file prefix in data paths ([`8bbee62`](https://github.com/kaskada-ai/kaskada/commit/8bbee62e689a57a803ee790ce7503ff5eda22c62))
+
+* added option to not copy files locally on load ([`2fd2925`](https://github.com/kaskada-ai/kaskada/commit/2fd2925719c6c1caa16f15ac0b1dd8b67be7123d))
+
+* comments 
([`0a43b3f`](https://github.com/kaskada-ai/kaskada/commit/0a43b3ff77a7ea9204fec1560176c57b0b2946e7)) + +* unused imports proto ([`7e0e7af`](https://github.com/kaskada-ai/kaskada/commit/7e0e7af4b1d21015a261b6730aa07f973505518c)) + +* formatting ([`026bd31`](https://github.com/kaskada-ai/kaskada/commit/026bd317d10340453bf3d0fce27656b3d9bbb8eb)) + +* Merge branch 'main' into feature/kafka-api-proposal ([`b665bd3`](https://github.com/kaskada-ai/kaskada/commit/b665bd3b0e33b7fd3ea3219b795dec136448a2e7)) + +* refactoring ([`bb3db03`](https://github.com/kaskada-ai/kaskada/commit/bb3db03b01d0feb5f5a4f899219aec88daf43f69)) + +* Add type variables to signatures ([`305bd36`](https://github.com/kaskada-ai/kaskada/commit/305bd36c7042e9f1f71b6468f2087bef03912ad0)) + +* rectified <a> tag for Development text in CONTRIBUTING.md file (#468) + +Close #467 ([`35ea1f9`](https://github.com/kaskada-ai/kaskada/commit/35ea1f9e6badf8eba672631a7487da2e4f288764)) + +* Update ci_engine.yml ([`bb2088b`](https://github.com/kaskada-ai/kaskada/commit/bb2088b5fbebfb37c16b9917c666b49f8402445d)) + +* rectified <a> tag for Development text in CONTRIBUTING.md file ([`f5cd294`](https://github.com/kaskada-ai/kaskada/commit/f5cd29411b8b068a6be5e22f8a335712a7d959bd)) + +* comments ([`3796009`](https://github.com/kaskada-ai/kaskada/commit/3796009b248c48c54ceec9d91f64696cf67d75f7)) + +* comments ([`9f25f43`](https://github.com/kaskada-ai/kaskada/commit/9f25f438b89d32749546f868d844542708699b2c)) + +* updated wren changes ([`35ec2bc`](https://github.com/kaskada-ai/kaskada/commit/35ec2bcec661de9ceda1b8b7e66d22b9d6d9b91a)) + +* code review comments ([`f126659`](https://github.com/kaskada-ai/kaskada/commit/f12665917db319306cf5131b1bdf0f192e92d70c)) + +* ref: Move hashing to sparrow-arrow (#454) + +Also add a `Hasher` that hashes into a re-usable buffer. This tends to +speed up the hash operation. The `Hasher` supports hashing of structs +and other types that we didn't previously support. ([`6460d09`](https://github.com/kaskada-ai/kaskada/commit/6460d09ceb5993917e3fa8195893603bec395836)) + +* fix struct hashing ([`495b8a7`](https://github.com/kaskada-ai/kaskada/commit/495b8a7d1c3bad4dec48b2f9efe6337d629f2848)) + +* ref: Move hashing to sparrow-arrow + +Also add a `Hasher` that hashes into a re-usable buffer. This tends to +speed up the hash operation. The `Hasher` supports hashing of structs +and other types that we didn't previously support. ([`35a95e8`](https://github.com/kaskada-ai/kaskada/commit/35a95e8a4e7e97b791ea61b3cab2669b2b104f62)) + +* deps: Upgrade to Arrow 42 (#455) + +It looks like they improved some of the time parsing/printing for CSV +and/or arithmetic to better respect timezones. This caused the results +for a variety of tests to change in terms of precision of output in the +CSV. ([`420b6f8`](https://github.com/kaskada-ai/kaskada/commit/420b6f82497e639cf92de79d3dec58300f755886)) + +* set run id ([`1104c33`](https://github.com/kaskada-ai/kaskada/commit/1104c3311e54e187b975d590d0a1a634046a7b7e)) + +* Fix query csv integ test ([`5187936`](https://github.com/kaskada-ai/kaskada/commit/5187936aae085f4950aee7e223d05ba10b7fd8ae)) + +* deps: Upgrade to Arrow 42 + +It looks like they improved some of the time parsing/printing for CSV +and/or arithmetic to better respect timezones. This caused the results +for a variety of tests to change in terms of precision of output in the +CSV. 
([`de327da`](https://github.com/kaskada-ai/kaskada/commit/de327da6fa334a0a5344ce16f1546e9df22d23cf)) + +* bug: manager make directory for sqlite (#461) + +Fixes one issue in #459. Updates the dbpath logic to merge the provided +db path with the current path to produce an absolute path using the +`abs` ([docs](https://pkg.go.dev/path/filepath#Abs)). + +Previously, we were only checking for the home directory and not +necessarily worrying about any other use cases. The logic proposed here +should merge the two flows. ([`dc5d791`](https://github.com/kaskada-ai/kaskada/commit/dc5d7915a8b83a94616f70776b2e6229d9713964)) + +* Merge branch 'main' into bug/459-mkdir-tests ([`531a0a8`](https://github.com/kaskada-ai/kaskada/commit/531a0a898f431aab35026294b1901d6c91a8981b)) + +* use abs directory and mkdirall ([`83eeff5`](https://github.com/kaskada-ai/kaskada/commit/83eeff5d8cd0623641473831bf10a79553b0eb53)) + +* code review comments ([`a164186`](https://github.com/kaskada-ai/kaskada/commit/a164186500765dbd0aaa7e3e0b478b812e2d36ec)) + +* buf format ([`011a92a`](https://github.com/kaskada-ai/kaskada/commit/011a92a8b4e5e8565801f977036965a03d6e6171)) + +* kafka streaming api proposal ([`d12f500`](https://github.com/kaskada-ai/kaskada/commit/d12f500444a86738af20d16afbd8b11928d0a698)) + +* enable pulsar tests and remove some old tests (#453) ([`a09a39f`](https://github.com/kaskada-ai/kaskada/commit/a09a39f9a4bedb09dfe68713752e7ca71670fdbc)) + +* enable pulsar tests and remove some old tests ([`c76c60e`](https://github.com/kaskada-ai/kaskada/commit/c76c60e13bf505626760a09f378a18b5b3c7672c)) + +* bug: fix materialization stop signal (#451) + +Fixes the stop signal bug by using a stream in scan operation that takes +until the signal is received. + +Still keeps the pulsar to pulsar tests ignored, since I have this log +line I haven't figured out yet: +``` +2023-06-26T14:20:32.562464Z ERROR Error sending end event to channel - send failed because receiver is gone + +``` ([`22f7b6d`](https://github.com/kaskada-ai/kaskada/commit/22f7b6dffd0aaefc3951b918d709b9ac4e273d43)) + +* docs ([`1d7bd67`](https://github.com/kaskada-ai/kaskada/commit/1d7bd6701a34afd8ba1d73394a7b6db62205c366)) + +* fix query dropping stop signal channel ([`91f843e`](https://github.com/kaskada-ai/kaskada/commit/91f843e4f83bfd54c5072cbaf3e1d032e70da612)) + +* Merge branch 'main' into fix-materialize-stop-signal ([`b55bd99`](https://github.com/kaskada-ai/kaskada/commit/b55bd99ab244d78952cf58d1346680c5302fb1a0)) + +* comment ([`8f7639f`](https://github.com/kaskada-ai/kaskada/commit/8f7639f5f2b5dfa7079a83637ac7525df3db8f26)) + +* Use take until for stopsignal ([`ddb90f0`](https://github.com/kaskada-ai/kaskada/commit/ddb90f06335422e1bb88ba780110ffefe43a9b63)) + +* Update Pulsar Output Per Batch (#450) + +Updates the pulsar output module to have the producer send the internal +batch after processing every batch. See: +https://docs.google.com/document/d/1gU8NV65ATliGd2-eG2G-mCX2mu8S3jY9m9x1wOVqsaw/edit?usp=sharing +(June 22, 2023) for more info. 
([`269acb8`](https://github.com/kaskada-ai/kaskada/commit/269acb8e3d9df0386418c02acf6bc2565e8f280c))
+
+* Merge branch 'main' into feature/fix-pulsar-to-pulsar-tests ([`215d6a6`](https://github.com/kaskada-ai/kaskada/commit/215d6a6e8817d4588128e1691ef446feef290cd3))
+
+* benchmark: Initial scripts for some benchmarking (#449)
+
+- Data generator script to create Parquet files consistent with the
+examples in timeline posts
+- SQL file containing DuckDB versions of the queries from the posts
+- Notebook containing Kaskada versions of the queries from the posts ([`2bd1a7c`](https://github.com/kaskada-ai/kaskada/commit/2bd1a7c46c7437f0b316d4081a9747499d783aac))
+
+* back to pending test ([`815c6a2`](https://github.com/kaskada-ai/kaskada/commit/815c6a25a6c679df52d661e2de5e96a364f9b1cf))
+
+* cargo fmt ([`a884d8b`](https://github.com/kaskada-ai/kaskada/commit/a884d8b15acfba8e1e23ca50ab630d6971767960))
+
+* fixed some tests ([`84255cd`](https://github.com/kaskada-ai/kaskada/commit/84255cd62e8311c7bb7b840508c59eb1704c0e4b))
+
+* Merge branch 'main' into ben/sql-benchmarking ([`d11f147`](https://github.com/kaskada-ai/kaskada/commit/d11f147c640e058b1119a195eafcc369ffc07a57))
+
+* scripts used for benchmarking ([`3ba318d`](https://github.com/kaskada-ai/kaskada/commit/3ba318dd285a0b5abf79914a91c4d0b3efb7d77f))
+
+* added materialization integration tests with pulsar sources & destinations (#448)
+
+currently the `mat_pulsar_to_pulsar_test` isn't passing, but I think we
+should merge this as is due to several bug fixes contained inside, and
+then continue working on the remaining test in another PR. ([`1691a71`](https://github.com/kaskada-ai/kaskada/commit/1691a7122950d683fbe1dc549d261959d90f20d0))
+
+* updated ssl ([`22c4f79`](https://github.com/kaskada-ai/kaskada/commit/22c4f79c2126b3a43ddcd64a73bb60966a5b2476))
+
+* removed test ([`4c67a67`](https://github.com/kaskada-ai/kaskada/commit/4c67a67b1051121442863d26bcfebde03d2f971e))
+
+* table to pulsar test now passing ([`af1eb7d`](https://github.com/kaskada-ai/kaskada/commit/af1eb7d720465760937204fd999ac1c6e0e0ef1b))
+
+* fixed reconcile bug ([`0e3a826`](https://github.com/kaskada-ai/kaskada/commit/0e3a826f2ca733558399aeacdd92b6b43bee5395))
+
+* more tweaks ([`e6327d3`](https://github.com/kaskada-ai/kaskada/commit/e6327d334153535019347b3bf9bf7f0836e8aba1))
+
+* minor tweaks ([`73066f6`](https://github.com/kaskada-ai/kaskada/commit/73066f69609d93ed4a6dad5414870dc007e88268))
+
+* added check to query for stream backed tables ([`c00329c`](https://github.com/kaskada-ai/kaskada/commit/c00329c8665a93f1ba9f941058722d73bc436225))
+
+* updated tests ([`0b053aa`](https://github.com/kaskada-ai/kaskada/commit/0b053aaaa24bb9f9fdfeeae5db602a61194900d0))
+
+* more test progress ([`fd76c14`](https://github.com/kaskada-ai/kaskada/commit/fd76c14a8e1f4b7237fbe7e7178050de5cf83068))
+
+* debug ([`7e55d4d`](https://github.com/kaskada-ai/kaskada/commit/7e55d4d6e5c4dce1af9af798dea477fc2dca80b2))
+
+* updated tests to run locally ([`a5c40e4`](https://github.com/kaskada-ai/kaskada/commit/a5c40e48db64b84c436858c83415443ebeb9ec72))
+
+* added initial pulsar to pulsar test ([`ef8545e`](https://github.com/kaskada-ai/kaskada/commit/ef8545ec4058cfbce3e77be31f6945b7c74b445b))
+
+* initial add of pulsar to obj_store test ([`d1f54f5`](https://github.com/kaskada-ai/kaskada/commit/d1f54f5712180726fa0f45bc9e7bdb44b745a321))
+
+* add data generator and queries for duckdb ([`bd744ac`](https://github.com/kaskada-ai/kaskada/commit/bd744ac1ff2c0ea0fd63c6b6c95d9fe4f4af00ec))
+
+* fix jupyter 
dockerfile and bump versions (#446) ([`70a7b39`](https://github.com/kaskada-ai/kaskada/commit/70a7b3904c84974873ab4344ee2b1ba2ee4146bd)) + +* bump version to 0.8.2 (#444) ([`778e883`](https://github.com/kaskada-ai/kaskada/commit/778e883b163b5335ad2fa0555d4caacc7d150665)) + +* fixed Jupyter docker build (#443) ([`5ab7884`](https://github.com/kaskada-ai/kaskada/commit/5ab7884be446290f03137e30190ad7b8ac994d28)) + +* use mmap instead of direct i/o (#439) + +Flipping to mmap from direct I/O for rocksdb makes sparrow test run on +arch. I verified it still works on Ubuntu and OSX. ([`ea44d03`](https://github.com/kaskada-ai/kaskada/commit/ea44d03faaf1ca849cd643c4708516ddb0aa1706)) + +* fixed Jupyter docker build ([`47eb093`](https://github.com/kaskada-ai/kaskada/commit/47eb0935d1f546cf8cff71312b618f6f3bf77144)) + +* Merge branch 'main' into tsk-mmap-for-rocksdb ([`9e8380f`](https://github.com/kaskada-ai/kaskada/commit/9e8380f32dcfbd9398d98dc34748e66e00b3b167)) + +* bumped version to 8.0.1 (#441) ([`659fd39`](https://github.com/kaskada-ai/kaskada/commit/659fd390c36334acc13557efe6673a4cca88fb4d)) + +* Merge branch 'main' into tsk-mmap-for-rocksdb ([`7485da1`](https://github.com/kaskada-ai/kaskada/commit/7485da1d43c3986fd7e5753b107871cf3ebc5481)) + +* Kaskada Release 0.7.1 and Python 0.4.1 (#438) ([`c43c5f1`](https://github.com/kaskada-ai/kaskada/commit/c43c5f175541202bf88d5cf341e83e0765075b74)) + +* bumped versions ([`cd5c610`](https://github.com/kaskada-ai/kaskada/commit/cd5c61004098980e2df5e762211ea9cf274ef5a0)) + +* Update Streaming Avro Record Output (#433) + +PR with the progress made towards debugging the streaming +implementation. + +Changes: +* Reverted the revert for Publish Time. The implementation was correct, +just missing one more step. +* Added the missing step for publish time. When reading the stream, a +flag is passed to consider adding the publish time. + * In the execution phase, no publish time (false). + * In the preparation phase, publish time (true). ([`cb631e4`](https://github.com/kaskada-ai/kaskada/commit/cb631e4a080230788b83936a822ad7a7df125a72)) + +* fixed ReconcileMaterializations panic (#437) + +also fixes wren hanging after ctrl+c. 
([`1911554`](https://github.com/kaskada-ai/kaskada/commit/1911554f7b7f69d273301c9faf41a1a082a13e52))
+
+* use mmap instead of direct i/o ([`4ec74be`](https://github.com/kaskada-ai/kaskada/commit/4ec74bef518a84f3f9ef38b63f09c53a28668bdb))
+
+* fixed ReconcileMaterializations panic ([`2d180dc`](https://github.com/kaskada-ai/kaskada/commit/2d180dcc893572b5538e95b42d141c9a51d39341))
+
+* added changes ([`e81d5b7`](https://github.com/kaskada-ai/kaskada/commit/e81d5b7da6e399af3587211ef7d96a4e39b94d98))
+
+* force all-results when using stream-based materializations (#435) ([`ed224e8`](https://github.com/kaskada-ai/kaskada/commit/ed224e8d16bfb98af7f222fb6f8e82e1b6414c7d))
+
+* fixed test ([`eb7ce79`](https://github.com/kaskada-ai/kaskada/commit/eb7ce7930294fe77756a504365f9a3e8d3843208))
+
+* force all-results when using stream-based materializations ([`c0e1469`](https://github.com/kaskada-ai/kaskada/commit/c0e14697758cf36a056d598828fb8a095f46e530))
+
+* Revert "Remove Publish Time from non-prepare Pulsar Paths" (#431)
+
+Reverts kaskada-ai/kaskada#426 ([`53dd9b9`](https://github.com/kaskada-ai/kaskada/commit/53dd9b970d5e36838c780608d067de63f4c6a5af))
+
+* Revert "Remove Publish Time from non-prepare Pulsar Paths" ([`08cce0e`](https://github.com/kaskada-ai/kaskada/commit/08cce0e2716c9ada758faa48bcab7fd7dd52c0e9))
+
+* Update exporting-for-training.adoc (#428)
+
+From the context I think these lines should look like this. ([`88ae9ad`](https://github.com/kaskada-ai/kaskada/commit/88ae9ad7cf962fae476428a9ac26507717f02cc3))
+
+* Merge branch 'main' into patch-3 ([`a568803`](https://github.com/kaskada-ai/kaskada/commit/a568803f466e556dbda159a302216be0da9b7fd3))
+
+* Remove Publish Time from non-prepare Pulsar Paths (#426) ([`a8826e4`](https://github.com/kaskada-ai/kaskada/commit/a8826e44f2a02e35f628e354ddadfd7172de8680))
+
+* Delete wren binary (#429) ([`e16e646`](https://github.com/kaskada-ai/kaskada/commit/e16e6460f9db4bf13115ca64e91e68678494e672))
+
+* Update exporting-for-training.adoc
+
+From the context I think these lines should look like this. ([`834e3c1`](https://github.com/kaskada-ai/kaskada/commit/834e3c178d7916d5d74d7a59050717d242c226b7))
+
+* Update aggregation-and-windowing.adoc (#421)
+
+I noticed that the table was rendered differently than intended. The
+cell markup was slightly different from the other tables, so I added
+the missing symbol. 
([`d7308f6`](https://github.com/kaskada-ai/kaskada/commit/d7308f6c0a5696dfe82708974cb1910b043ddd96))
+
+* Merge branch 'main' into feature/file-service-pulsar ([`8bb3853`](https://github.com/kaskada-ai/kaskada/commit/8bb385371faecd71f632ff8e9a16e7a8edae51b9))
+
+* publish time only on prepare ([`bca1a7c`](https://github.com/kaskada-ai/kaskada/commit/bca1a7c84b3f2312bebf6ac301cff06225f869b0))
+
+* rectified a spelling mistake in README file (#423)
+
+Close #422 ([`00c79d3`](https://github.com/kaskada-ai/kaskada/commit/00c79d329b425fbe192bf966bd7ca807347d432d))
+
+* Merge branch 'main' into upgrade-arrow ([`54f9810`](https://github.com/kaskada-ai/kaskada/commit/54f981057c97ff7f282fca58c9f6509281db0eea))
+
+* fix tests ([`5b75acd`](https://github.com/kaskada-ai/kaskada/commit/5b75acded70d057133241a8d260e94e5646aae95))
+
+* Merge branch 'main' into readme_branch ([`f41b708`](https://github.com/kaskada-ai/kaskada/commit/f41b708b5ab4c405196e9df60941759069bfd8e0))
+
+* rectified a spelling mistake in README file ([`cc43f8d`](https://github.com/kaskada-ai/kaskada/commit/cc43f8d95f41df15560b6f0b93a5a4d3a41cb24e))
+
+* fix some problems ([`c06707a`](https://github.com/kaskada-ai/kaskada/commit/c06707a73c5c962f2d3f614518772478f7dee9c7))
+
+* Merge branch 'main' into patch-2 ([`e4baea8`](https://github.com/kaskada-ai/kaskada/commit/e4baea8cc7915dc7369e4c92b39f6d862283aa6d))
+
+* Update aggregation-and-windowing.adoc
+
+I noticed that the table was rendered differently than intended. The cell markup was slightly different from the other tables, so I added the missing symbol. ([`ed68994`](https://github.com/kaskada-ai/kaskada/commit/ed689942162d85c2baf83621f1a4187512a8e7ed))
+
+* Add Admin Service URL to Pulsar Backed Table (#408)
+
+Adds the `admin_service_url` to the python client to fix support for
+Pulsar tables. ([`15e3504`](https://github.com/kaskada-ai/kaskada/commit/15e350497eca89e6ec9a6c926ea615a3e53c3039))
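+
+For illustration, creating a Pulsar-backed table through the python
+client now involves the admin endpoint alongside the broker URL (a
+hedged sketch -- apart from `admin_service_url`, the surrounding names
+are approximate, not the exact API):
+
+```python
+# Illustrative sketch; `admin_service_url` is the field this change
+# adds. The other names are approximate, not the exact client API.
+from kaskada import table
+
+table.create_table(
+    table_name="PurchaseStream",
+    time_column_name="purchase_time",
+    entity_key_column_name="user_id",
+    pulsar_config={
+        "broker_service_url": "pulsar://localhost:6650",
+        "admin_service_url": "http://localhost:8080",  # new in this change
+        "topic": "persistent://public/default/purchases",
+    },
+)
+```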
+
+* Fix wren status running ([`cf3c88c`](https://github.com/kaskada-ai/kaskada/commit/cf3c88c66a315af74cdb2957d58d4d0c465bacf7))
+
+* clippy ([`35ccbdd`](https://github.com/kaskada-ai/kaskada/commit/35ccbdda12c70171d923a5d1872603f487e872b6))
+
+* remove id from control ([`9730da4`](https://github.com/kaskada-ai/kaskada/commit/9730da4e791b834078ac369c185c49fa78e29ac8))
+
+* Implement materialization api ([`991240d`](https://github.com/kaskada-ai/kaskada/commit/991240d50d92c29484935673d8a79c55a7f4cb44))
+
+* clippy ([`dccc1cf`](https://github.com/kaskada-ai/kaskada/commit/dccc1cf6caaf9892468685bb809aeab8be94913c))
+
+* comments ([`253b15b`](https://github.com/kaskada-ai/kaskada/commit/253b15bc8bd577dd4fc259bbc77c4129d8ce3927))
+
+* use indexed vec; split out conditions for breaking pipeline ([`b01aa65`](https://github.com/kaskada-ai/kaskada/commit/b01aa657d496ccfff3480bfa30273db95112c6b7))
+
+* add comments ([`b2e1bfd`](https://github.com/kaskada-ai/kaskada/commit/b2e1bfd2193274e47232015a95248ab4a33ebb14))
+
+* Merge branch 'main' into physical-plan ([`cafd206`](https://github.com/kaskada-ai/kaskada/commit/cafd206479685d588822875b847226c1dce54951))
+
+* ref: create an Exprs (#412) ([`0cd8f18`](https://github.com/kaskada-ai/kaskada/commit/0cd8f1866b617b33e8ef7237cdf180440cf3b2cf))
+
+* ref: create an Exprs ([`f3257f5`](https://github.com/kaskada-ai/kaskada/commit/f3257f5b5ccf981dd1dba1c17cc411a24049cce6))
+
+* comments ([`7f85b9c`](https://github.com/kaskada-ai/kaskada/commit/7f85b9c0586a961d621633cda6fa0f1ebc591b98))
+
+* review comments ([`3fbe779`](https://github.com/kaskada-ai/kaskada/commit/3fbe779960f47bf923429da5d0fd037b3ad21f4b))
+
+* updated tests ([`d64bc6b`](https://github.com/kaskada-ai/kaskada/commit/d64bc6bd84c9053212599dd6d4ec88f827b40bb4))
+
+* ref: Move ScalarValue & downcast to sparrow-arrow (#407)
+
+There is more cleanup to be done here -- using error_stack, getting rid
+of the (generally undesired) string parsing, etc., but splitting here in
+the interest of smaller / easier to review changes. ([`afecac7`](https://github.com/kaskada-ai/kaskada/commit/afecac7929459559927fc3c510f7ade68d2f9548))
+
+* Merge branch 'main' into feature/file-service-pulsar ([`2e855b2`](https://github.com/kaskada-ai/kaskada/commit/2e855b2046df903f9c457c3c57534b8423b572a4))
+
+* updated python model ([`4a2604d`](https://github.com/kaskada-ai/kaskada/commit/4a2604d5352215a16c4a169dad8c761af9a4d982))
+
+* remove unused deps on core ([`4df565d`](https://github.com/kaskada-ai/kaskada/commit/4df565d2ae11d734517649b72641c39a24a0196f))
+
+* ref: Move ScalarValue & downcast to sparrow-arrow
+
+There is more cleanup to be done here -- using error_stack, getting
+rid of the (generally undesired) string parsing, etc., but splitting
+here in the interest of smaller / easier to review changes. 
([`2dbbc8d`](https://github.com/kaskada-ai/kaskada/commit/2dbbc8dada851e5d5d8e1d2445979ce173247159)) + +* Merge branch 'main' into tsk-docs-note-on-install ([`4eb58a7`](https://github.com/kaskada-ai/kaskada/commit/4eb58a74755054935ff9e4abad9ab72a8611350f)) + +* use -o in Makefile ([`3d05cc7`](https://github.com/kaskada-ai/kaskada/commit/3d05cc7f34e2ee3c6e497255c8996867e5902ace)) + +* Update .gitignore + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`f842fe1`](https://github.com/kaskada-ai/kaskada/commit/f842fe186ae5b3cd49a2d57a458fa67412309035)) + +* Merge branch 'main' into tsk-docs-note-on-install ([`b652ebf`](https://github.com/kaskada-ai/kaskada/commit/b652ebfe80dd1bc2fc73ead6661518a1f9405e3d)) + +* Use a relative path for the file, include a step allowing this to run… (#405) + +… locally ([`e752de3`](https://github.com/kaskada-ai/kaskada/commit/e752de3ab233b255851641d3e4c4a6d44459fd32)) + +* Update README.md to use new docker container (#404) ([`1f6bd82`](https://github.com/kaskada-ai/kaskada/commit/1f6bd82e61c36dba7e3fb4b5d31c683c2ff2cc82)) + +* Use a relative path for the file, include a step allowing this to run locally ([`f727939`](https://github.com/kaskada-ai/kaskada/commit/f7279393e7c5182320f3f9b24a9868d9a423b139)) + +* Merge branch 'main' into tsk-cla-check-workaround ([`1c9cc26`](https://github.com/kaskada-ai/kaskada/commit/1c9cc26f4f9045d716af570fc3802388e3190eae)) + +* Merge branch 'main' into tsk-ci-fix-integtest-skipping ([`fa922d3`](https://github.com/kaskada-ai/kaskada/commit/fa922d34056c650fab0bc4530c10df76c5ee2c59)) + +* Update README.md to use new docker container ([`518892f`](https://github.com/kaskada-ai/kaskada/commit/518892f47f822a31a324ca122a3cdf47bb3894a8)) + +* bump versions for release (#402) ([`e87a902`](https://github.com/kaskada-ai/kaskada/commit/e87a9024b54dc265811de194dee03a85202cdfd0)) + +* Merge branch 'main' into tsk-push-to-dockerhub ([`b6f1c14`](https://github.com/kaskada-ai/kaskada/commit/b6f1c145d036660ecb3101a91dc541b4b6ce8139)) + +* Merge branch 'tsk-push-to-dockerhub' of github.com:kaskada-ai/kaskada into tsk-push-to-dockerhub ([`5439220`](https://github.com/kaskada-ai/kaskada/commit/54392208eb1ffb6fb6458210caf4044a586d657d)) + +* fix typo ([`d282fde`](https://github.com/kaskada-ai/kaskada/commit/d282fde2f58fcfcf8117be8c85ad7beeda3df1a2)) + +* Merge branch 'main' into tsk-push-to-dockerhub ([`d1a866c`](https://github.com/kaskada-ai/kaskada/commit/d1a866c8710fac7e91425c5620afa7a7a498255c)) + +* (ci) login and push release images to DockerHub ([`09cc214`](https://github.com/kaskada-ai/kaskada/commit/09cc214a76766b491ecf3cc368ccceeacd84c011)) + +* doc: fix mac OSX xattr kaskada cli (#396) + +Updates the setup instructions for the Kaskada cli to use `-w` since +`-dr` are not valid flags in the latest OSX. Also updates to explicitly +state the 3 binaries. 
([`4802dfe`](https://github.com/kaskada-ai/kaskada/commit/4802dfe294e34b8d1d4582ca64fba6030203be8f)) + +* Update working-with-records.adoc (#393) + +This must be a typo + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`0bacc37`](https://github.com/kaskada-ai/kaskada/commit/0bacc37b57a1c321b31e1af17e3b6a8e5f9fea53)) + +* Merge branch 'main' into kevinjnguyen-patch-1 ([`5b83b9d`](https://github.com/kaskada-ai/kaskada/commit/5b83b9d9e45d9ca8e08ebc5d1d7d8f45f4e6d159)) + +* Fenl Diagnostic HTML Rendering with Protos (#280) (#376) (#385) + +# Fenl Diagnostic HTML Rendering with Protos + +A second pass at implementing rendering of Fenl Diagnostics from the +Engine upward. The UX is the same but now the source of truth is the +Engine. + +## Changes + +* Removes the custom python client approach +* Updates fenl diagnostic to have a `web_link` as another field +* Updates python client rendering to use the `web_link` field ([`d348ee9`](https://github.com/kaskada-ai/kaskada/commit/d348ee981ca97fac019ed87d9f55e91f7afb5f22)) + +* doc: fix mac OSX xattr kaskada cli ([`6a82dfb`](https://github.com/kaskada-ai/kaskada/commit/6a82dfb77db53dbb7ab99bc889f4ab96bcca4e99)) + +* example: Add SVG to PNG conversion script (#389) ([`150f718`](https://github.com/kaskada-ai/kaskada/commit/150f7187d9b307e6813412de7a7abbe0f72d2180)) + +* added nav link ([`be3b8e5`](https://github.com/kaskada-ai/kaskada/commit/be3b8e5f8f6ab63ad8c2f290f99bcf0165a4c4b1)) + +* added links ([`37b62ef`](https://github.com/kaskada-ai/kaskada/commit/37b62ef53fc341c5143f4ab28cae0f0c84c743f8)) + +* added adoc ([`5d7b838`](https://github.com/kaskada-ai/kaskada/commit/5d7b838cd85920cde39f5de72b8a04a5b06070d5)) + +* remote session ([`c3a4540`](https://github.com/kaskada-ai/kaskada/commit/c3a45400f76e4fd054f0050c3d3d85565f971dc1)) + +* example: Add SVG to PNG conversion script ([`76e57b2`](https://github.com/kaskada-ai/kaskada/commit/76e57b2f7f51caa4b14c3d80e45165ddf5be4813)) + +* added long-running materialization support to wren (#387) + +@jordanrfrazier this is a new branch than the previous PR: +https://github.com/kaskada-ai/kaskada/pull/374 which I'm going to close. 
([`a1b240d`](https://github.com/kaskada-ai/kaskada/commit/a1b240d234dccc6d4391fe4d53505eeda774e6bd))
+
+* made suggested updates ([`29b4a97`](https://github.com/kaskada-ai/kaskada/commit/29b4a97848c89dba1aca587f8787217c9f6ed4d8))
+
+* added long-running materialization support to wren ([`0610266`](https://github.com/kaskada-ai/kaskada/commit/0610266a600b4b19ec4f3c014d782ab0e87d9c35))
+
+* refactored the compute package into several interfaces (#386)
+
+now have:
+* CompileManager
+* ComputeManager
+* FileManager
+* PrepareManager
+* MaterializationManager ([`e0525e9`](https://github.com/kaskada-ai/kaskada/commit/e0525e9310f494f89582eed150ec97b3a150657e))
+
+* added initial compile unit test ([`7092c0e`](https://github.com/kaskada-ai/kaskada/commit/7092c0e189d83fe67a931f16b0bd97632d505304))
+
+* refactored the compute package into several interfaces ([`50b1c63`](https://github.com/kaskada-ai/kaskada/commit/50b1c63447b1127173d1a38316fd18d39e48bd72))
+
+* (ci): fix typo in expression for skipping integ tests (#382) ([`e00a8b0`](https://github.com/kaskada-ai/kaskada/commit/e00a8b086397c1ad557897b643d33e1a4743e003))
+
+* (ci): fix typo in expression for skipping integ tests ([`20472c9`](https://github.com/kaskada-ai/kaskada/commit/20472c9f6e2f9cfdf2b11fc66e83662abfed2639))
+
+* (test) fix pip package test to avoid infinite restart (#381)
+
+The added feature to restart the manager and engine caused the original
+script to go into an infinite restart loop. Testing the pip package now
+calls `session.stop()` to terminate the session.
+
+*NOTE*: there is an error from grpc and _channel.py that we ignore; our
+processes seem to be terminated correctly ([`01b32e7`](https://github.com/kaskada-ai/kaskada/commit/01b32e725178da4ed109d6da870b2e7a78de225e))
+
+* Merge branch 'main' into python0.3.0 ([`e22ddce`](https://github.com/kaskada-ai/kaskada/commit/e22ddce1d9ae58b1c258f73ddde0bf730b067a6b))
+
+* update notebook ([`70520f9`](https://github.com/kaskada-ai/kaskada/commit/70520f923292eefbab3701db21e84773045310b2))
+
+* (test) fix pip package test to avoid infinite restart ([`6fd2d31`](https://github.com/kaskada-ai/kaskada/commit/6fd2d31411b5867aab4e798ccec2ec65909a7f92))
+
+* Fenl Diagnostic HTML Rendering (#280) (#376)
+
+# Fenl Diagnostic HTML Rendering
+
+Updates the rendering of the Fenl Diagnostic to render HTML A tags for
+known and documented errors. Also lays out the groundwork for easy error
+documentation.
+
+The new implementation will scan through a Fenl Diagnostic for an error
+code using the regex `error\[(,?.*)\]`, then check to see if that code
+is documented as part of the `error_codes.py: FENL_DIAGNOSTIC_ERRORS`.
+If there is a documented error, the code converts the error code to an
+HTML <a> link.
+
+**Example of a pre-defined error:** (Note the hyperlink E0013)
+<img width="542" alt="Screenshot 2023-05-23 at 9 34 48 PM"
+src="https://github.com/kaskada-ai/kaskada/assets/15032894/052516fb-fa6f-4efd-b81c-857fa11a40de">
+
+**Example of the default undocumented case:**
+<img width="727" alt="Screenshot 2023-05-23 at 9 35 20 PM"
+src="https://github.com/kaskada-ai/kaskada/assets/15032894/ec3996da-8919-467a-b4d7-b2a76440a31d"> ([`14b132d`](https://github.com/kaskada-ai/kaskada/commit/14b132d84794efaf1912a5b98854dbeba5d9cc45))
+
+* Python Release 0.3.0 (#375)
+
+Updates the Python version to 0.3.0 and updates the docs for no auto
+recovery. 
([`7764a8a`](https://github.com/kaskada-ai/kaskada/commit/7764a8a5a535b4fe5ce783d489ae19b3a8ee9b56))
+
+* #280 improved error messaging ([`238dd8a`](https://github.com/kaskada-ai/kaskada/commit/238dd8af3f182e36c29d0b09c47e8422f54702bb))
+
+* Add compile result debug ([`73d0aa0`](https://github.com/kaskada-ai/kaskada/commit/73d0aa0e86e5116f3eab734384a5f7d0f9ce0be6))
+
+* Fix source on table ([`5ba599e`](https://github.com/kaskada-ai/kaskada/commit/5ba599eac98ecc0d89ed469843f7c4d61b4dbe09))
+
+* Add source to tables in wren ([`8c3b66e`](https://github.com/kaskada-ai/kaskada/commit/8c3b66e16af87b7bf40a2afb1297f717363fce5b))
+
+* clippy fixes ([`d54416d`](https://github.com/kaskada-ai/kaskada/commit/d54416da46d375a664a5ff0764e4711b6bccc199))
+
+* Add bounded lateness option in script ([`8702095`](https://github.com/kaskada-ai/kaskada/commit/87020955d64d68cc6acd24af2a0957015651750b))
+
+* add examples ([`47b18e0`](https://github.com/kaskada-ai/kaskada/commit/47b18e0cc1b7a6e587a7b68c02ccd9feb2a8bfd4))
+
+* Oops, actually add readme ([`edef40e`](https://github.com/kaskada-ai/kaskada/commit/edef40e538a5cec4736fc72a809d5338515e3ab3))
+
+* Update readme formatting ([`68f76f0`](https://github.com/kaskada-ai/kaskada/commit/68f76f09a2400852bcf111d9101893e0fdd01fe5))
+
+* Add draft readme ([`0ad2b25`](https://github.com/kaskada-ai/kaskada/commit/0ad2b25d537679746d346d1fdea136b53fb66996))
+
+* Fix projected schema ([`d99ec86`](https://github.com/kaskada-ai/kaskada/commit/d99ec86de8eafae43ab2661d95e276fd78bc6162))
+
+* Add unit test for single row batch ([`efa9890`](https://github.com/kaskada-ai/kaskada/commit/efa9890cef81f55a838dafcd429d03c9bd1585d6))
+
+* remove duplicate script ([`fb49dc1`](https://github.com/kaskada-ai/kaskada/commit/fb49dc1f09950e0379b19dd8295b8e36521374b7))
+
+* logs/comments ([`32e4410`](https://github.com/kaskada-ai/kaskada/commit/32e441096499505ef4ff926ea5b42073f64cb4e3))
+
+* Allow optional auth for pulsar, and some end stream fixes ([`f451814`](https://github.com/kaskada-ai/kaskada/commit/f4518144f32c36dca5580ec1ed37ecc5c7a05439))
+
+* Fixes some loop breaks and the initial watermark ([`e591dc0`](https://github.com/kaskada-ai/kaskada/commit/e591dc058c08ba1ae3a2114570df73f68d1df025))
+
+* materialize script ([`adcfa82`](https://github.com/kaskada-ai/kaskada/commit/adcfa829fa0f26a6955ac7ebf6d37dffdc9f67f8))
+
+* Add stream reader framework ([`2893129`](https://github.com/kaskada-ai/kaskada/commit/289312987b572bef211a10b067949670b9597741))
+
+* update labels to use plain text ([`5661d31`](https://github.com/kaskada-ai/kaskada/commit/5661d318cf1ec7be4079d81fdbb4390c703daacf))
+
+* updated with docs ([`5ac51c0`](https://github.com/kaskada-ai/kaskada/commit/5ac51c0deefa95cbfb343273ac0a41b632f0405a))
+
+* added mock generation to wren (#370)
+
+Also updated existing tests to use the new mocks, so that we don't need
+to manually update the mocks in tests when interfaces change.
+
+this is mostly prep for upcoming changes related to materializations ([`30d5b49`](https://github.com/kaskada-ai/kaskada/commit/30d5b494f67a62c2363e2bb53a9a523434f137bc))
+
+* generate mocks in CI ([`c516e3d`](https://github.com/kaskada-ai/kaskada/commit/c516e3d44aeca933410bc22be6b1768aae7179bd))
+
+* removed generated code ([`3de4152`](https://github.com/kaskada-ai/kaskada/commit/3de4152415d4db9684da5de07265544d72f837d7))
+
+* added interface for computeManager (#368)
+
+this should facilitate easier unit testing of wren. 
+
+also relocated a `getDataToken` method from Compute to QueryV1, since it
+is only used there. ([`b43daf8`](https://github.com/kaskada-ai/kaskada/commit/b43daf89976075470765c14b5a53f91cc9d2235e))
+
+* added mock generation to wren and updated existing tests ([`e43fb14`](https://github.com/kaskada-ai/kaskada/commit/e43fb149928bd73fb12235582e4e40138b781342))
+
+* added interface for computeManager ([`6ca516c`](https://github.com/kaskada-ai/kaskada/commit/6ca516c085eaf953361306be89a7c888880bcdfe))
+
+* fix some queries ([`53edb6e`](https://github.com/kaskada-ai/kaskada/commit/53edb6e590d465c60ba75fa7e8cc97c90f1d2922))
+
+* bumped versions to 0.6.4 (#363) ([`3152014`](https://github.com/kaskada-ai/kaskada/commit/3152014b6dd84802aef2b02aac2297a06f7aff2c))
+
+* fixed the build of the jupyter image (#362) ([`67febad`](https://github.com/kaskada-ai/kaskada/commit/67febadf0d643857a9f55a27553047081eb2ac56))
+
+* bump versions for release (#360) ([`f609c1b`](https://github.com/kaskada-ai/kaskada/commit/f609c1b2413aae8ce034e84f43d6f9df70f517d2))
+
+* fix engine log startup (#359) ([`cf1a450`](https://github.com/kaskada-ai/kaskada/commit/cf1a4501adb2f91c4337ed12e0efe11326cae5b9))
+
+* updated logging formats of both processes to be more similar (#358)
+
+Added flags and `NO_COLOR` environment variable support to both wren &
+sparrow to disable ANSI color codes in log output when set.
+
+see: https://no-color.org/
+
+Also
+* Disabled wren logging database calls by default in debug mode. Added
+new flag to re-enable when desired.
+* Updated python client to disable color log output for services ([`c714767`](https://github.com/kaskada-ai/kaskada/commit/c7147673b840c7e04003d15eb5957176ab06f89a))
+
+* Add execution stream for pulsar (#320)
+
+Updates the iterators to streams and creates an execution input stream
+that implements input buffer logic with watermarks.
+
+Next PR will hook this up to the `Scan` operation using pulsar-backed
+tables. ([`697b7f5`](https://github.com/kaskada-ai/kaskada/commit/697b7f58e21db6b0f4305e313380c6982ccb883f))
+
+* fixed tests ([`6c482b7`](https://github.com/kaskada-ai/kaskada/commit/6c482b7b1a8d01d30dab7200e1471f5b8fc8dfed))
+
+* move key hash updates outside of column behavior loop ([`a3d77fd`](https://github.com/kaskada-ai/kaskada/commit/a3d77fd9acc1325a794c78fdc71837d5ecfe9459))
+
+* explicitly add table config? 
([`06adb93`](https://github.com/kaskada-ai/kaskada/commit/06adb939cdd71a8a5fb5d52cb6eea55648bc9667)) + +* remove unused import ([`f4aa3b9`](https://github.com/kaskada-ai/kaskada/commit/f4aa3b9c71d92df7652959d548dc343afad5bdd7)) + +* update var name ([`9c35a7c`](https://github.com/kaskada-ai/kaskada/commit/9c35a7c35c2c16f01944d6da23d6d1ed3cbeeaca)) + +* Remove extra column behavior ([`7731a49`](https://github.com/kaskada-ai/kaskada/commit/7731a49049ac53aa3a53b5bd1c42b3291ad39c12)) + +* Refactor prepare iter to return a stream ([`42cbc4b`](https://github.com/kaskada-ai/kaskada/commit/42cbc4b5187c6737b6d4919ab2efe6bf1cc0a8ce)) + +* Add pulsar stream path ([`58739dd`](https://github.com/kaskada-ai/kaskada/commit/58739ddec6ee9aa883416fb57c0c5c479b1c8837)) + +* move avro_arrow to arrow crate ([`a530249`](https://github.com/kaskada-ai/kaskada/commit/a53024978e08c50cf9a54545787e18d0e7292acc)) + +* Fix compile errors ([`4949d54`](https://github.com/kaskada-ai/kaskada/commit/4949d5429ab3c70b96f8fd90c9a0d9b103d1a107)) + +* fix stream ([`0609ee4`](https://github.com/kaskada-ai/kaskada/commit/0609ee44685eca5c6cd6b19df64a4f6f106df01f)) + +* trying to make iters async ([`61cdef6`](https://github.com/kaskada-ai/kaskada/commit/61cdef62af6a3e3bcd8299dcbd1864bfbc682ada)) + +* comments ([`588f230`](https://github.com/kaskada-ai/kaskada/commit/588f2306c5a02fe5b774b13429172cab6a3e24e9)) + +* Clippy fixes ([`7ed355c`](https://github.com/kaskada-ai/kaskada/commit/7ed355c260f1fb85e65443fa3bee4c3cc9ee9e78)) + +* Add unit test ([`46efea9`](https://github.com/kaskada-ai/kaskada/commit/46efea9272c61ce88bbbb9b1f2246935917d55d0)) + +* use block on for calling the async function ([`e67642d`](https://github.com/kaskada-ai/kaskada/commit/e67642d01ec485938acd36bdf1bedf00e46338da)) + +* update comments ([`15741ad`](https://github.com/kaskada-ai/kaskada/commit/15741ad57be1dc9d712e704c78e47dd7a74e7118)) + +* Add execution iterator for pulsar streams ([`5139c88`](https://github.com/kaskada-ai/kaskada/commit/5139c88a1d0e652ee8482e767bb42a43fb4f39dc)) + +* fmt ([`f4a0e4d`](https://github.com/kaskada-ai/kaskada/commit/f4a0e4d2cf8477d7f290cb22603c0969bd7a999f)) + +* cleaned up dockerfiles and added jupyter build (#357) + +Removed unused `Dockerfile.notebook` + +Trying to see if we can use `Dockerfile.release` in place of +`Dockerfile.integration` since they are so similar. + +Added new `Dockerfile.jupyter` to create a multi-arch release of our +system based on a Jupyter notebook environment with pre-installed +kaskada services and clients.
([`c25fd85`](https://github.com/kaskada-ai/kaskada/commit/c25fd85f5547a7a43c7355962d81145d59c75d49)) + +* updated logging formats of both processes to be more similar ([`6862285`](https://github.com/kaskada-ai/kaskada/commit/68622850f6bbfaa365f8e6a39436732a6e06d392)) + +* Merge branch 'main' into docker/jupyter ([`32b0744`](https://github.com/kaskada-ai/kaskada/commit/32b07442331460ca68bf392927d9446c18384fd2)) + +* Update .github/workflows/release_engine.yml + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`1ccf90d`](https://github.com/kaskada-ai/kaskada/commit/1ccf90df5b5ba3336a43d655b97c3a5673947ff4)) + +* (ci) updates rust builds for PRs only to use bigger machines (#300) + +[Build sparrow (rust) / Rust +Checks](https://github.com/kaskada-ai/kaskada/actions/runs/4857535060/jobs/8658115221#logs) +succeeded now in 24m 39s + +on an 8-core machine ([`8e71e5f`](https://github.com/kaskada-ai/kaskada/commit/8e71e5f822c68e484b1d27c52dfb04a8bca12470)) + +* move back to ubuntu 20.04 ([`99ee60b`](https://github.com/kaskada-ai/kaskada/commit/99ee60bc75f5e631e474d2020abbc4c2bcfba557)) + +* Update Default Data Location to User Directory Cache (#253) (#355) + +Closes #253. ([`903ab8d`](https://github.com/kaskada-ai/kaskada/commit/903ab8db4392d398be5fd1275db9be481eaed29e)) + +* Merge branch 'main' into tsk-libc-test ([`f4b70be`](https://github.com/kaskada-ai/kaskada/commit/f4b70becd494181a218f7c23e783c25899255438)) + +* another attempt ([`855dc0b`](https://github.com/kaskada-ai/kaskada/commit/855dc0b09be0a427c1e70bf70966de3a1c22380f)) + +* cleaned up dockerfiles and added jupyter build ([`042bfb2`](https://github.com/kaskada-ai/kaskada/commit/042bfb2568dfc5ec5ce1b14f1dc2a28e4873a889)) + +* added a jupyter dockerfile ([`d296797`](https://github.com/kaskada-ai/kaskada/commit/d296797e4f389bbadf41bb113e5834c703829aa2)) + +* nil check ([`3e8fef4`](https://github.com/kaskada-ai/kaskada/commit/3e8fef40b5c155dc872620d5740a7e2db2d93b02)) + +* Merge branch 'main' into feature/253-default-data-location ([`e2b1f13`](https://github.com/kaskada-ai/kaskada/commit/e2b1f13ba15ac61ef80c51a6555eded97b9dfc23)) + +* updated the wren database default ([`a3cce42`](https://github.com/kaskada-ai/kaskada/commit/a3cce428a57fda545cba9b1bb2ef569e314d6ce9)) + +* api changes for long-lived materializations in sparrow (#298) ([`a272bad`](https://github.com/kaskada-ai/kaskada/commit/a272badd0278a471025e7e50dc3b00f1f99babc9)) + +* Merge branch 'main' into prod/stream_protos ([`07d8c78`](https://github.com/kaskada-ai/kaskada/commit/07d8c789840f3e4266191e95093743219b6608f1)) + +* Update docs-src/modules/ROOT/pages/installing.adoc + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`caf6aef`](https://github.com/kaskada-ai/kaskada/commit/caf6aeffe713eee9f460cacadba1427803227b00)) + +* Update docs-src/modules/ROOT/pages/installing.adoc + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`6bc5cf6`](https://github.com/kaskada-ai/kaskada/commit/6bc5cf628596d3544113e638265c885a1232c1e4)) + +* updated references to include readthedocs ([`f3f9ebf`](https://github.com/kaskada-ai/kaskada/commit/f3f9ebf2a74707fe0d320f9686d1b9b0a6742247)) + +* update default data location to user directory cache (closes #253) ([`0407f43`](https://github.com/kaskada-ai/kaskada/commit/0407f4391a2916232c71eb69ebe45a0988fbb65f)) + +* added gcp config ([`35df487`](https://github.com/kaskada-ai/kaskada/commit/35df4875b3eb3ade8bbd810f915af45934a64564)) + +* (ci) use callable workflows for docs
actions (#351) + +Closes: #349 + +Moved from using the `gh` command to relying on +[convictional/trigger-workflow-and-wait@v1.6.5](https://github.com/convictional/trigger-workflow-and-wait/tree/v1.6.5/) + +* Could not figure out a way to start a workflow and wait on its result +using `gh` +* Had to alter kaskada-ai/docs-site workflows to be callable +* also had to make the kaskada-ai/docs-site workflow responsible for PRs +to take in an input argument--the branch name for the PR in this repo. + +Relevant changes in +[kaskada-ai/docs-site](https://github.com/kaskada-ai/docs-site/commit/40c0dc8b61dcbdc80dacd5290ec189395379a065) + * I accidentally pushed to `main` on that one, sorry ([`cc11cec`](https://github.com/kaskada-ai/kaskada/commit/cc11cece95c0a786e101f05b3c7994c359a0f1c9)) + +* Fix test imports ([`12c5936`](https://github.com/kaskada-ai/kaskada/commit/12c593643bd1b92108b86f4f39fc6491fac6c6d6)) + +* ignore no dataToken error (#350) + +Updates the query path to not set the dataToken on the response if it +doesn't exist. The dataToken will be missing only on a brand-new system, where no +data has been loaded into Kaskada. + +After skipping setting the dataToken field, the code correctly returns +the failed compilation instead, including helpful fenl diagnostics. + +Old Behavior: +``` +> ./kaskada_cli table list +(empty set) +> ./kaskada_cli query run Purchase +9:01PM FTL error="rpc error: code = NotFound desc = data_token not found" +``` + +New Behavior: +``` +> ./kaskada_cli query run Purchase + +state: STATE_FAILURE +config: {} +analysis: {} +fenlDiagnostics: + fenlDiagnostics: + - severity: SEVERITY_ERROR + code: E0006 + message: Unbound reference + formatted: |+ + error[E0006]: Unbound reference + --> Query:1:1 + | + 1 | Purchase + | ^^^^^^^^ No reference named 'Purchase' + | + = No formulas, tables, or let-bound names available + + numErrors: "1" +metrics: {} +requestDetails: + requestId: aaafdf41a2604f771e38207568455cce +``` ([`3f2648b`](https://github.com/kaskada-ai/kaskada/commit/3f2648b6eb704f9e159e67cc2ad27dcb56b5ea7e)) + +* (ci) call apt-get update before installing qemu packages (#346) + +Closes #344 + +Fixes +https://github.com/kaskada-ai/kaskada/actions/runs/4916858931/jobs/8786161907 ([`ac43dd0`](https://github.com/kaskada-ai/kaskada/commit/ac43dd043bb946063d4024fdbec75dd0af0914f4)) + +* Merge branch 'main' into therapon-patch-1 ([`5ebc57e`](https://github.com/kaskada-ai/kaskada/commit/5ebc57ef1cea75d61e97cf5b050403972c0a57e9)) + +* draft: Fail Unsupported Columns (#318) (#323) + +# Fail Unsupported Columns (#318) + +Updates the `FileService` and `PrepareService` to fail for unsupported +columns (Decimal) rather than silently drop the columns.
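+ +A minimal sketch of the idea (illustrative only, not the actual sparrow code; it assumes Arrow's schema types): validate the schema up front and return an error for Decimal columns instead of dropping them. + +```rust +use arrow::datatypes::{DataType, Schema}; + +// Sketch only: reject unsupported (Decimal) columns during prepare +// instead of silently dropping them. +fn check_supported(schema: &Schema) -> Result<(), String> { +    for field in schema.fields() { +        if matches!(field.data_type(), DataType::Decimal128(_, _) | DataType::Decimal256(_, _)) { +            return Err(format!("unsupported column '{}' of type {:?}", field.name(), field.data_type())); +        } +    } +    Ok(()) +} +```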
([`00c2c6b`](https://github.com/kaskada-ai/kaskada/commit/00c2c6b69abd71988a830a16894de3416156b8c1)) + +* ignore no dataToken error ([`511b909`](https://github.com/kaskada-ai/kaskada/commit/511b90953200af63534ab58898310982a0b64034)) + +* fail on unsupported columns (#318) ([`99fa46d`](https://github.com/kaskada-ai/kaskada/commit/99fa46d052868b3a78793a155e5eea5f035ad5fc)) + +* Update sparrow with proto changes ([`45b4a3b`](https://github.com/kaskada-ai/kaskada/commit/45b4a3b5a661803154e3b73c6aeb361a4950e101)) + +* update issue templates (#348) ([`7433cbb`](https://github.com/kaskada-ai/kaskada/commit/7433cbb23e145746fa83a0cc90080531869c0c3e)) + +* update issue templates ([`2113db0`](https://github.com/kaskada-ai/kaskada/commit/2113db0f9e20858968edf06345b5b86ea3cc9a3f)) + +* (docs) fix integrations menu (#345) + +* make `integrations` menu item a link to the index page +* fix unclosed example block ([`8c44afa`](https://github.com/kaskada-ai/kaskada/commit/8c44afa92000454e5576bd597760fd3b9b329e87)) + +* (ci) call apt-get update before installing qemu packages ([`d9ad6be`](https://github.com/kaskada-ai/kaskada/commit/d9ad6be65383e009a02341f5dc230a33f75a3fca)) + +* (docs) fix integrations menu ([`53b1bfa`](https://github.com/kaskada-ai/kaskada/commit/53b1bfaf6288763107e81407423fe2b3b65fb75b)) + +* Add fenlmagic load ack (#305) ([`a50384b`](https://github.com/kaskada-ai/kaskada/commit/a50384bd3aaa92e48d11db750771d97c1364dad1)) + +* formatted protos ([`7cede87`](https://github.com/kaskada-ai/kaskada/commit/7cede872ac603581d04a291f483c3cb0f2c7ecb6)) + +* made wren changes ([`84e8ae5`](https://github.com/kaskada-ai/kaskada/commit/84e8ae5ca5c3f202aaaf63cffdc27be047400d24)) + +* made suggested changes ([`3b574d8`](https://github.com/kaskada-ai/kaskada/commit/3b574d81a7cef199a805b8323dd5833e08ac533d)) + +* draft: api changes for long-lived materializations in sparrow ([`5235f54`](https://github.com/kaskada-ai/kaskada/commit/5235f5480459fd7bb819d5e64b4b186005c52a58)) + +* updated cli getting started docs (#328) + +also updated load-data docs ([`0af8955`](https://github.com/kaskada-ai/kaskada/commit/0af895534db62a57b26469d3fde9045a77f66390)) + +* made suggested changes ([`57c79ee`](https://github.com/kaskada-ai/kaskada/commit/57c79eefc9b08c96fd07e24f0caea77492e41c76)) + +* Optional PATH docs cli ([`8400a26`](https://github.com/kaskada-ai/kaskada/commit/8400a26b6b5cef8e5b72660ff15c61e9a42ce535)) + +* create release v0.6.2 (#329) ([`1ee0d3c`](https://github.com/kaskada-ai/kaskada/commit/1ee0d3cc34a4b1ce6acb98dcd908d723cdd236ff)) + +* also update loading data ([`b3984b5`](https://github.com/kaskada-ai/kaskada/commit/b3984b5a6a52936297c7aa9a193a3f44fa91c705)) + +* updated cli getting started docs ([`e869e48`](https://github.com/kaskada-ai/kaskada/commit/e869e489dc3bb175191c0d324861052612949695)) + +* updated docs for new CLI resources (#322) + +* also moved `sync` command stuff to its own file. + +--------- + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> +Co-authored-by: Kevin J Nguyen <kevinjnguyen2@gmail.com> ([`b876c73`](https://github.com/kaskada-ai/kaskada/commit/b876c732017a23a6881bcefb90b0a705a6542fb4)) + +* fixed sync export (#321) ([`71e80ca`](https://github.com/kaskada-ai/kaskada/commit/71e80ca9463f6cc6d8082944d00587154610420f)) + +* Change README to point at Slack rather than GH Discussions. (#325) + +Also, reword to be more encouraging.
([`2c84805`](https://github.com/kaskada-ai/kaskada/commit/2c84805c8de80d7dcada2145c84a3eb3a37820dd)) + +* Change README to point at Slack rather than GH Discussions. + +Also, reword to be more encouraging. ([`c08a00c`](https://github.com/kaskada-ai/kaskada/commit/c08a00c18d0d7640815511776bf7981938441dd3)) + +* Align the two getting started docs (#311) + +This uses the same flow and code examples for both Jupyter and the CLI. ([`bcfa69e`](https://github.com/kaskada-ai/kaskada/commit/bcfa69e0ebc9c8752a4faeb4750596e6b0ac0654)) + +* reverted logging level changes ([`415fc65`](https://github.com/kaskada-ai/kaskada/commit/415fc654bf13ebef510bde1b522ed13926a2dcd8)) + +* fixed sync export ([`f3a2568`](https://github.com/kaskada-ai/kaskada/commit/f3a2568177ae0b800ee6bcc0bd7dfb0557d8d592)) + +* Added create/list/get/delete endpoints for tables/views/materializations (#319) + +Also made a ton of other improvements + +I'd like to get this merged and published ASAP without waiting for the +doc changes, so that a few of us can work on the doc updates more easily +together. + +Should help #202, #278 ([`b290960`](https://github.com/kaskada-ai/kaskada/commit/b2909608625044380768c0af375606dd0cb589d4)) + +* fix bug ([`14e5da8`](https://github.com/kaskada-ai/kaskada/commit/14e5da8edca0f26ab2ea27d99ee109221405d9be)) + +* Add link to catalog ([`80b2696`](https://github.com/kaskada-ai/kaskada/commit/80b2696c24add253d7b3029db7ee2f512536ff4a)) + +* Apply suggestions from code review + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`e2e537e`](https://github.com/kaskada-ai/kaskada/commit/e2e537eea783c2260d5544d72040eb075f97f587)) + +* fixed tests ([`8549437`](https://github.com/kaskada-ai/kaskada/commit/854943781128655f0164d1b1997802d84b6c813a)) + +* wip ([`fbbef2e`](https://github.com/kaskada-ai/kaskada/commit/fbbef2ec5d8a298f1fd7d6d08d4254598bc8bf98)) + +* added basic list cmd to each resource ([`349bb71`](https://github.com/kaskada-ai/kaskada/commit/349bb71e3622840e0eb4f969b60c7abfac4a7ec1)) + +* made a slew of updates ([`493832f`](https://github.com/kaskada-ai/kaskada/commit/493832f8027820d14510e127f8158d8dd298a891)) + +* Update Log Output Files Documentation (#274) (#313) + +# Description + +Documentation updates for #304. ([`6845dad`](https://github.com/kaskada-ai/kaskada/commit/6845dadcd9259c035bff6669fc2e9caa4c1e4493)) + +* (ci) adds S3 integration tests (#315) + +Takes care of the first task in #232 ([`bfd7420`](https://github.com/kaskada-ai/kaskada/commit/bfd742007d870fdf8295f22c82aa0a65709df862)) + +* adds S3 integration tests ([`1b8942a`](https://github.com/kaskada-ai/kaskada/commit/1b8942a041539483a61684383c94988e492ec0dd)) + +* stopped removing output fields for non-sync ([`3bb9ef2`](https://github.com/kaskada-ai/kaskada/commit/3bb9ef2bf6d2bfa49890cfd9c96378223b2bf5d5)) + +* added create & get support to table, view, materialization ([`4d028a5`](https://github.com/kaskada-ai/kaskada/commit/4d028a50e198df918543897e0ce28a26b42484c9)) + +* Update Log Output Files (#274) (#304) + +# Description + +Updates the logging for the Manager and the Engine to output to: +~~`~/.cache/kaskada/logs/<service_name>/<service_name>-<stderr/std-out>-<timestamp>.log`~~ + +`~/.cache/kaskada/logs/<timestamp>-<service_name>-<std-err/std-out>.log`. 
+ +Example: +* `~/.cache/kaskada/logs/2023-05-02T18-11-25-engine-stderr.log` +* `~/.cache/kaskada/logs/2023-05-02T18-11-25-engine-stdout.log` ([`6d6491d`](https://github.com/kaskada-ai/kaskada/commit/6d6491d6c1d65dccbd8adbb780720b830780c711)) + +* Update Log Output Files (Closes #274) ([`80d05de`](https://github.com/kaskada-ai/kaskada/commit/80d05de082db2138fb6e6d79192138a867247241)) + +* Update Log Output Files Documentation (#274) ([`0ae223d`](https://github.com/kaskada-ai/kaskada/commit/0ae223dc8003877c79fcfc7b5d424b47f2f4f3af)) + +* (ci) fix typo: should-skip => should_skip (#310) + +CI integration jobs were not being skipped for PRs that did not alter +any code relevant to sparrow or wren due to a typo in YAML. ([`c4b8a2c`](https://github.com/kaskada-ai/kaskada/commit/c4b8a2c78f0fa0a5c8bc57ad5113ea962f9378bb)) + +* (docs) updated release instructions (#309) + +* updates the python drafter to mark python releases using +`python@vX.Y.Z`, it was missing the `v` +* Updates release docs to +1. Edit the draft release created by drafter rather than create a new +release. Has all the info and is easier + 2. Remove command line instructions that created a fresh release. ([`961400b`](https://github.com/kaskada-ai/kaskada/commit/961400b44d2ffe2de854f07da91a144ecb7daa8c)) + +* Align the two getting started docs + +This uses the same flow and code examples for both Jupyter and the CLI. ([`b82f365`](https://github.com/kaskada-ai/kaskada/commit/b82f365048b39815359ffc962a2ab4f32c83a466)) + +* (docs) updated release instructions ([`7dba211`](https://github.com/kaskada-ai/kaskada/commit/7dba21106fb983bc3ed3835b80e0b440525c1ff5)) + +* Add an example using Docker and a notebook to get started quickly wit… (#306) + +…h some synthetic data ([`98e2a72`](https://github.com/kaskada-ai/kaskada/commit/98e2a7294b032467aa582ce56db37cfd14cd1671)) + +* Merge branch 'main' into rm/trial ([`d32cd07`](https://github.com/kaskada-ai/kaskada/commit/d32cd07e396296db20b111a3c7ccc580fd4106b5)) + +* Add an example using Docker and a notebook to get started quickly with some synthetic data ([`e9cbc1e`](https://github.com/kaskada-ai/kaskada/commit/e9cbc1ea1d7a820861f65e105528073b489debd7)) + +* Update pyproject.toml to 0.1.7 (#307) ([`14c650b`](https://github.com/kaskada-ai/kaskada/commit/14c650b12ddbaa0463db8a05dd6fece92805f544)) + +* Updates Python Client to use file URI Prefix (#240) + +# Description +Closes #239 + +## Changes +* Updates the python client to accept the file:// prefix for file paths +and removes the logic for converting to a URI ([`f5ba06c`](https://github.com/kaskada-ai/kaskada/commit/f5ba06caf7b5f5d2527c806f72b5e330524ccd87)) + +* use different runs-on targets for custom runners ([`174f1f9`](https://github.com/kaskada-ai/kaskada/commit/174f1f9e1a5559772164790493f4a4301e6206d6)) + +* Merge pull request #296 from kaskada-ai/rm/docs-jupyter + +Revise Jupyter hello-world to align to the new getting-started and ju… ([`c9f3ac2`](https://github.com/kaskada-ai/kaskada/commit/c9f3ac2044cc2f2828b78e17668d3d86b0c67ae1)) + +* Merge branch 'main' into rm/docs-jupyter ([`553925b`](https://github.com/kaskada-ai/kaskada/commit/553925b8dd94dc809785f6d859eb62ff54c85a2e)) + +* Update docs-src/modules/getting-started/pages/hello-world-jupyter.adoc + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`b043766`](https://github.com/kaskada-ai/kaskada/commit/b043766fc15c682eec5c40579e305a7ea2dad42c)) + +* Merge pull request #291 from kaskada-ai/rm/docs-flow + +Rewrite the "getting started" page 
([`75f5179`](https://github.com/kaskada-ai/kaskada/commit/75f5179071aa123dbdee356ff86b9340d185cdc8)) + +* Merge pull request #301 from kaskada-ai/wren/load_errors + +added file existence and accessibility checks on load ([`e947b34`](https://github.com/kaskada-ai/kaskada/commit/e947b34e9e8bd611bcce507563c2f8af1e946883)) + +* added tests of missing and inaccessible files ([`c9fed4b`](https://github.com/kaskada-ai/kaskada/commit/c9fed4b72739ac8f41d8ff4baea179f26e969b65)) + +* added checks on load file ([`21f818f`](https://github.com/kaskada-ai/kaskada/commit/21f818f7b3b7b92c1d2652afafdd8c4013fd8b48)) + +* Merge branch 'main' into 239-python-client-file-prefix ([`27ba579`](https://github.com/kaskada-ai/kaskada/commit/27ba5793e04e62ca4402805ef30594ca189fb5f6)) + +* poetry types ([`fda2b59`](https://github.com/kaskada-ai/kaskada/commit/fda2b598f80972c1ea4c03ae2abca9c8dd562b66)) + +* code review comments/add support for file:// ([`3813188`](https://github.com/kaskada-ai/kaskada/commit/381318836d2eecef25b028c7a78a3011bb45a11d)) + +* Revert "closes #239" + +This reverts commit a96ab14fb3bf9fcdfa401751385a9a1fc476476e. ([`f723867`](https://github.com/kaskada-ai/kaskada/commit/f723867dbb5eb4a5d824a091532debaa6ea90485)) + +* Revert "update docs" + +This reverts commit c61171c431ca0c4c99a201c3a37d4b21c58ca0a7. ([`a1fc427`](https://github.com/kaskada-ai/kaskada/commit/a1fc4276f3fe5bfeb6dae521ae9f54083ec1b115)) + +* Merge branch 'main' into 239-python-client-file-prefix ([`74996b2`](https://github.com/kaskada-ai/kaskada/commit/74996b2781917401b5ec54fe005e5ce3555c4c8a)) + +* Merge pull request #231 from kaskada-ai/feature/use-object-store + +Object Store for FileService and PrepareService ([`deafc8c`](https://github.com/kaskada-ai/kaskada/commit/deafc8c96ee583893a29d20f18b69eaa5826ffe8)) + +* Merge pull request #293 from kaskada-ai/bug-add-ignored-test-when-non-null-behavior + +bug: add ignored test for unexpected non-null when behavior ([`ffd5537`](https://github.com/kaskada-ai/kaskada/commit/ffd5537e9a7f4c585dc498a11f33e9ca04b19c84)) + +* clippy ([`03c6259`](https://github.com/kaskada-ai/kaskada/commit/03c6259ee56033bb6cae5cca09f5132a1267a9f7)) + +* reverted wren changes, cleaned up env vars, fixed sparrow ([`6529ec3`](https://github.com/kaskada-ai/kaskada/commit/6529ec35f3ceefc6c6f7fbaaad128f8bd3010171)) + +* Clean up variable naming and fix output prefix bug ([`f89a9e5`](https://github.com/kaskada-ai/kaskada/commit/f89a9e573efa16f52daf07471fc58f8372823244)) + +* made suggested edits ([`af9fb6e`](https://github.com/kaskada-ai/kaskada/commit/af9fb6e8fb4491d0eb8501e6ab134545aa121dd4)) + +* more unwrap handling ([`70b4deb`](https://github.com/kaskada-ai/kaskada/commit/70b4deb2bcff391a531bdc66d4f571b0cf3f55c7)) + +* Add error stack and clean up a bit ([`d2847f0`](https://github.com/kaskada-ai/kaskada/commit/d2847f0125cd3f314bb09e16de73f2089648ed2e)) + +* updated tests ([`9d00ea3`](https://github.com/kaskada-ai/kaskada/commit/9d00ea3fb0b35146dcf558a3a8216bc351b95b6f)) + +* cargo clippy ([`9cf32b2`](https://github.com/kaskada-ai/kaskada/commit/9cf32b2c71fb953b46c70ecd26e0295e3ebe2fa1)) + +* added example notebook ([`62d2f6f`](https://github.com/kaskada-ai/kaskada/commit/62d2f6fe04d1cf56c9855441e27977dd3f3fb021)) + +* updated wren ([`5b0f5e2`](https://github.com/kaskada-ai/kaskada/commit/5b0f5e24a09abab2983c2ca084e15e5719a21082)) + +* prepare changes ([`8ce2dad`](https://github.com/kaskada-ai/kaskada/commit/8ce2dade6046d516f37479df2081268feb805337)) + +* removed s3 helper
([`cc84355`](https://github.com/kaskada-ai/kaskada/commit/cc8435502a4b5b93c6e3d1ebf9ab13e8532dd950)) + +* added upload ([`a0e7a31`](https://github.com/kaskada-ai/kaskada/commit/a0e7a3136c93e57546c75ef838881046102018d7)) + +* updated wren ([`cbd1e17`](https://github.com/kaskada-ai/kaskada/commit/cbd1e17b4c5d826a82120bd32583671c3dd6050e)) + +* updated preparation ([`4234f9f`](https://github.com/kaskada-ai/kaskada/commit/4234f9f21231eeefaaffd0f7b1ef92d21d72ac4d)) + +* revise ignore message ([`e000336`](https://github.com/kaskada-ai/kaskada/commit/e000336198e431477571da5b031f60d9c527099c)) + +* Add ignored test for unexpected non-null when behavior ([`acb3a13`](https://github.com/kaskada-ai/kaskada/commit/acb3a132a554914aed0fffbe2dfd5ab5ab7df0df)) + +* Comments ([`4d7f6c9`](https://github.com/kaskada-ai/kaskada/commit/4d7f6c96c459a1feee088d3dfc5fe2f8006cb41f)) + +* Comments ([`623a3cb`](https://github.com/kaskada-ai/kaskada/commit/623a3cbd5e978e68f13b4efbbe13441bc85f2d69)) + +* Revise Jupyter hello-world to align to the new getting-started and just be cleaner. ([`7c5d352`](https://github.com/kaskada-ai/kaskada/commit/7c5d352055812153bc6d3ef9e6c8110a38ce2ae3)) + +* Updates per comments, and some additional intro encouraging people to actually read this stuff ([`32702de`](https://github.com/kaskada-ai/kaskada/commit/32702de30cdcec3e3ffa28dd15730c6ef9997863)) + +* Rewrite the "getting started" page + +The goal here is to quickly introduce the concepts needed to understand +Kaskada, and provide links to deeper reading as appropriate. ([`fa834f3`](https://github.com/kaskada-ai/kaskada/commit/fa834f3dae22aa97c040646d39d21d0127279b42)) + +* Merge pull request #290 from kaskada-ai/local_tests + +upgraded integration tests to be runnable locally ([`0d959d8`](https://github.com/kaskada-ai/kaskada/commit/0d959d80ab9b33e0ff4fe5ac22183508bfed13bc)) + +* Merge pull request #285 from kaskada-ai/rm/docs-cli-query + +Add a section to querying documenting the CLI, link to PB and Python … ([`a8fbeef`](https://github.com/kaskada-ai/kaskada/commit/a8fbeef92b68dc9a65fbfbda9bae9bc6f93a8195)) + +* upgraded integration tests to be runnable locally ([`8f36fd3`](https://github.com/kaskada-ai/kaskada/commit/8f36fd3ea4161e1d0c6394402fc168a6e34fc956)) + +* Merge branch 'main' into rm/docs-cli-query ([`b24584f`](https://github.com/kaskada-ai/kaskada/commit/b24584f54601bc46c6191fa1a17e422abd357dc4)) + +* fix timeline ([`55b3b2b`](https://github.com/kaskada-ai/kaskada/commit/55b3b2bc2413b7df5d556c37412b2a741f69bdd9)) + +* Merge pull request #286 from kaskada-ai/tsk-release-docs + +(docs) adds steps on releasing engine and python components ([`69bf4ad`](https://github.com/kaskada-ai/kaskada/commit/69bf4ad600982bb75401412c7d16107c3bd43a18)) + +* Merge pull request #287 from kaskada-ai/rm/docs-tabs + +Use tabs to separate different implementations of the same logic. 
([`7ebc8ff`](https://github.com/kaskada-ai/kaskada/commit/7ebc8ffa4436523a248c6285aeb21bc75bc48daf)) + +* Merge pull request #284 from kaskada-ai/rm/docs-fenlplugin + +Use plugin for visualizations ([`095c7e0`](https://github.com/kaskada-ai/kaskada/commit/095c7e06fc8a64fb707ff7baca92a0e2b2a28767)) + +* Use '.fenl' for query files ([`e588ae2`](https://github.com/kaskada-ai/kaskada/commit/e588ae26127b8839519b2d76670f9c692f90d3c0)) + +* Apply suggestions from code review + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`567d524`](https://github.com/kaskada-ai/kaskada/commit/567d52414db3d36e8e63a8a65d0177c3e067e01b)) + +* (docs) adds steps on releasing python engine on command line ([`8fd56bf`](https://github.com/kaskada-ai/kaskada/commit/8fd56bffbd1c08416e2cf82149a0131b35703dea)) + +* Apply suggestions from code review + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`4706f1d`](https://github.com/kaskada-ai/kaskada/commit/4706f1d8e7d3f5d0455e8e6beeedb81a33b3da3d)) + +* Delete unused images ([`e580bc0`](https://github.com/kaskada-ai/kaskada/commit/e580bc06220214f462a071755e6e4b8639d205c4)) + +* Use tabs to separate different implementations of the same logic. ([`d02b1eb`](https://github.com/kaskada-ai/kaskada/commit/d02b1eb9c2913b26fb8d495ed9466a052032fcff)) + +* (docs) adds steps on releasing engine and python components ([`c9d7540`](https://github.com/kaskada-ai/kaskada/commit/c9d75408e2b7e6cbb3d3cacef967edde0f8b71cc)) + +* Add a section to querying documenting the CLI, link to PB and Python docs ([`2431c69`](https://github.com/kaskada-ai/kaskada/commit/2431c69a6be8a8a5b3c4772f5c5b85cfa4f2bd0a)) + +* Merge pull request #281 from kaskada-ai/rm/docs-misc + +Simple docs improvements ([`4e2fc5b`](https://github.com/kaskada-ai/kaskada/commit/4e2fc5b40b2476bffd2f9eeb9f1380fa72989c1c)) + +* bumping versions for release (#282) ([`4f5aff1`](https://github.com/kaskada-ai/kaskada/commit/4f5aff172ef99909d96a1b9b209f87681a296d13)) + +* Use plugin for visualizations ([`b684e99`](https://github.com/kaskada-ai/kaskada/commit/b684e99e28376b8a8fc2b746c026e5829f2ffb8d)) + +* Remove dollars ([`3e9f5b0`](https://github.com/kaskada-ai/kaskada/commit/3e9f5b0764796219b3f34bc1a264035b74c75730)) + +* Apply suggestions from code review + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`c2f8fc2`](https://github.com/kaskada-ai/kaskada/commit/c2f8fc2bb9a54eaf9c735b7ae227fd9f972405b3)) + +* Simple docs improvements + +* Show running a query from a file in CLI examples, rather than heredoc +* Make it easier to find “windowing” in the doc nav +* Call out requirements in the “installing” section for Python +* Use the same install script in quickstart and installing section +* Add binaries to PATH +* Show the “you need to create a table first” warning before every place we load data +* Document how to connect to arbitrary endpoints as an alternative to the local session +* Link to “create a table” when we suggest people should do so. +* Add curly-braces note the first time records are used in each page of the docs.
([`132c988`](https://github.com/kaskada-ai/kaskada/commit/132c9881b11aad749f75568e2f6d6ff23e1b21dd)) + +* Merge pull request #276 from kaskada-ai/improve-timeine-notebook + +docs: Further refine the timelines notebook ([`30fc35f`](https://github.com/kaskada-ai/kaskada/commit/30fc35fd24d88698a5809c25b681a7012af2da4d)) + +* updated buf and plugins (#270) + +resolves #242 + +--------- + +Co-authored-by: Therapon Skoteiniotis <therapon@gmail.com> +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`975700a`](https://github.com/kaskada-ai/kaskada/commit/975700af4f9c6906cf61868a35e84a8df79bb3d7)) + +* Merge pull request #275 from kaskada-ai/tsk-docs-troubleshooting-guide + +(docs) adds troubleshooting section and an entry for logs ([`e20c267`](https://github.com/kaskada-ai/kaskada/commit/e20c267c40d31cf2d45505dfd6f6bf24fe83d6e8)) + +* Merge pull request #271 from kaskada-ai/rm/trial-3 + +Update docs to improve clarity and usability ([`ad17147`](https://github.com/kaskada-ai/kaskada/commit/ad1714742c273bd6abc3d0fb6fcb4a4127551316)) + +* (ci) include a build of the docs on each PR but do not publish (#265) ([`e19185b`](https://github.com/kaskada-ai/kaskada/commit/e19185bef9e06a3ca63910c0411077ae6aa0e201)) + +* (docs) adds troubleshooting section and an entry for logs ([`656ff78`](https://github.com/kaskada-ai/kaskada/commit/656ff78f5bb5f28768b118997ae3730bf7c85139)) + +* Merge pull request #269 from kaskada-ai/timeline-example-notebook + +docs: Add example based on timeline presentation ([`2d19b48`](https://github.com/kaskada-ai/kaskada/commit/2d19b48146c22fab8126b79a5dc0adf4b349c696)) + +* move notebooks ([`c24a7a4`](https://github.com/kaskada-ai/kaskada/commit/c24a7a43fc4b6bc114e7738844a5e75c54359f41)) + +* Fix wording ([`baa22fd`](https://github.com/kaskada-ai/kaskada/commit/baa22fd96bed63570e2ae221cd8d576bffc082a4)) + +* Apply suggestions from code review + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`a1ed8fe`](https://github.com/kaskada-ai/kaskada/commit/a1ed8fe1a9472a7b6ac210f910cf1a8a788fe426)) + +* Update docs to improve clarity and usability + +* Explain what data tokens are for everywhere they’re mentioned +* Fix “--result_behavior” in docs +* Start query examples with a table list (ie “Purchase”) +* Show “when” inside brackets and out early on +* Cross-link to function catalog anywhere we’re introducing syntax ([`e499760`](https://github.com/kaskada-ai/kaskada/commit/e49976076c9d0729b12a22446b9735e67a874e89)) + +* (docs) updates link to deleting a table (#264) ([`43d8e98`](https://github.com/kaskada-ai/kaskada/commit/43d8e984066dfea99cc7531be7954f31ae6bd5a5)) + +* (docs) fixes link in docs -- deleting tables with cli (#263) ([`cf0b290`](https://github.com/kaskada-ai/kaskada/commit/cf0b29039f3a9ad028cdcfc8de6a4015b97c9091)) + +* Merge pull request #262 from kaskada-ai/cli/delete + +added resource delete to the cli ([`c02d851`](https://github.com/kaskada-ai/kaskada/commit/c02d85153553d094b686c4550d7659a5af13938c)) + +* updated docs ([`c86e7d2`](https://github.com/kaskada-ai/kaskada/commit/c86e7d2c4649dc9b4d6182123622a9daa5f99802)) + +* updated docs ([`ad03d6e`](https://github.com/kaskada-ai/kaskada/commit/ad03d6e2367f1d6077e594e4e7574a6073c6a771)) + +* added resource delete to the cli ([`b3cf5f5`](https://github.com/kaskada-ai/kaskada/commit/b3cf5f5f7c3f87beeb7fc82076f0cc165d4a696d)) + +* Merge pull request #261 from kaskada-ai/tsk-python-client-docs + +(docs) RTD configuration
([`1d82745`](https://github.com/kaskada-ai/kaskada/commit/1d82745a0d2c6b4385e30b1adac195fedb5eabca)) + +* rust advisory ([`2b61796`](https://github.com/kaskada-ai/kaskada/commit/2b617967c92fcbb7f990abd9c87b2df0d831602f)) + +* moving .readthedocs.yaml file ([`5990758`](https://github.com/kaskada-ai/kaskada/commit/59907585ca4d2418be6d7762fb16770301ae497f)) + +* fixes paths and adds typing info in generated html ([`23e7a12`](https://github.com/kaskada-ai/kaskada/commit/23e7a1297b769ef6123424405614f25c1957f01a)) + +* Merge pull request #213 from kaskada-ai/pulsar/last_publish_time + +changed last_publish_time to i64 ([`d4ec77c`](https://github.com/kaskada-ai/kaskada/commit/d4ec77c881fc8a267367cc460c7bcf74fbaa1ad0)) + +* ensure that _publish_time never goes backwards ([`91c82fd`](https://github.com/kaskada-ai/kaskada/commit/91c82fdaafb3b4e662a411cd80b4b2ce570e61ea)) + +* update sparrow for data type change ([`ed19933`](https://github.com/kaskada-ai/kaskada/commit/ed199333ba3dd31a4652c9b71bbb56fd4323a7de)) + +* changed last_publish_time to i64 ([`71cd666`](https://github.com/kaskada-ai/kaskada/commit/71cd666b23e3eea19d8ed0a8b5825993fb580992)) + +* Merge pull request #235 from kaskada-ai/rm/docs-trial2b + +Improve query section of docs ([`18974d8`](https://github.com/kaskada-ai/kaskada/commit/18974d8e436dd9440079887ceff3effb2cb49e5c)) + +* Merge pull request #245 from kaskada-ai/tsk-read-the-docs-config + +(docs) adds extension to automatically call docs generate ([`4025dba`](https://github.com/kaskada-ai/kaskada/commit/4025dba9ed4351a29c8fd77b87d2581aa805bdca)) + +* Merge pull request #246 from kaskada-ai/tsk-ci-matrix-fix + +(ci) workaround to allow python client workflow to be required ([`14a6636`](https://github.com/kaskada-ai/kaskada/commit/14a663604d2d120ef8e5e65b3cb2c6476fef02a2)) + +* Merge branch 'main' into tsk-read-the-docs-config ([`d69eb97`](https://github.com/kaskada-ai/kaskada/commit/d69eb97402f93399f83c3b9eb26991c059bd2e7c)) + +* Merge branch 'main' into tsk-ci-matrix-fix ([`f093267`](https://github.com/kaskada-ai/kaskada/commit/f093267d5fad45ca941eceb6819d27ccd3a67dfe)) + +* Fix goof ([`2c655a4`](https://github.com/kaskada-ai/kaskada/commit/2c655a4a0dc304132066e158b51ffc6ac8fe2347)) + +* Fix string interpolations ([`ab7fa97`](https://github.com/kaskada-ai/kaskada/commit/ab7fa9731ab775d6e37f83fb76d087160f820372)) + +* Comments ([`0a141ad`](https://github.com/kaskada-ai/kaskada/commit/0a141ad0a20cdfdfcd843adf039a55c7f727d97b)) + +* Typo ([`a5c7a5f`](https://github.com/kaskada-ai/kaskada/commit/a5c7a5fc7bc42d34c9c13a72076250e63a9735f0)) + +* Improve query section of docs + +This was sort of a grab bag, hopefully it's more useful now. 
([`af9d6e2`](https://github.com/kaskada-ai/kaskada/commit/af9d6e2823fb72c0ea4777748fa29114f2d154e6)) + +* updated version to 0.1.5 (#247) ([`4c5cea6`](https://github.com/kaskada-ai/kaskada/commit/4c5cea62395929ada1037ffed49e5870af32b941)) + +* (ci) workaround to allow python client workflow to be required ([`8eb91fa`](https://github.com/kaskada-ai/kaskada/commit/8eb91fafbe811fddab347de4bdbacdadd312af81)) + +* remove old workaround target ([`5e36845`](https://github.com/kaskada-ai/kaskada/commit/5e36845edac1a3393d1ba8f3de09f5d6167a6088)) + +* Merge branch 'tsk-read-the-docs-config' of github.com:kaskada-ai/kaskada into tsk-read-the-docs-config ([`f94b7b8`](https://github.com/kaskada-ai/kaskada/commit/f94b7b87ae4606509746894f41d855b594a064da)) + +* better solution using extension ([`09256f7`](https://github.com/kaskada-ai/kaskada/commit/09256f7d6119e40f61f1dda3aa3f2589e02073de)) + +* Merge branch 'main' into tsk-read-the-docs-config ([`5e2c257`](https://github.com/kaskada-ai/kaskada/commit/5e2c2578d6f2fe37dd553e570557f716d703de7d)) + +* (docs) inject call to sphinx-apidoc so that RTD works correctly ([`c12818a`](https://github.com/kaskada-ai/kaskada/commit/c12818adb1018b870c891146bb4feb41d9928c1d)) + +* Merge pull request #244 from kaskada-ai/tsk-read-the-docs-config + +adds more required deps for docs ([`1e92692`](https://github.com/kaskada-ai/kaskada/commit/1e926925bf210205d53e4d95521523ac3502465c)) + +* Merge pull request #241 from kaskada-ai/python/no_download + +added environment variable to disable client downloads ([`fb3d584`](https://github.com/kaskada-ai/kaskada/commit/fb3d584e3e420abc395d0178dc11e7112197f91d)) + +* Merge branch 'main' into tsk-read-the-docs-config ([`9be0e19`](https://github.com/kaskada-ai/kaskada/commit/9be0e1984a7609ffbacde529efd77703be9f651f)) + +* adds more required deps for docs ([`497e754`](https://github.com/kaskada-ai/kaskada/commit/497e754b8d4a5358cbac5821d389c3c9dd2e9592)) + +* Merge pull request #243 from kaskada-ai/tsk-read-the-docs-config + +removes quotes from version number in requirements.txt ([`57e6658`](https://github.com/kaskada-ai/kaskada/commit/57e6658103dc0a1eb7f7dba4f3d19805d67138d9)) + +* added environment variable to disable client downloads ([`551d3b7`](https://github.com/kaskada-ai/kaskada/commit/551d3b7d195823bdaf45d7ce7395110ccc01bdc8)) + +* Merge branch 'main' into tsk-read-the-docs-config ([`1a29ac6`](https://github.com/kaskada-ai/kaskada/commit/1a29ac6c1578589a4e0baad64544d9555057ac26)) + +* removes quotes from version number in requirements.txt ([`b85f0ff`](https://github.com/kaskada-ai/kaskada/commit/b85f0fff4352f3b66d0a293674cc7ac331d17e93)) + +* Merge pull request #237 from kaskada-ai/tsk-read-the-docs-config + +(docs): config for readthedocs + deps for building docs ([`f5a44c5`](https://github.com/kaskada-ai/kaskada/commit/f5a44c5817db9466fcbcd19e6b480d6883b31ddc)) + +* update docs ([`c61171c`](https://github.com/kaskada-ai/kaskada/commit/c61171c431ca0c4c99a201c3a37d4b21c58ca0a7)) + +* closes #239 ([`a96ab14`](https://github.com/kaskada-ai/kaskada/commit/a96ab14fb3bf9fcdfa401751385a9a1fc476476e)) + +* Merge branch 'tsk-read-the-docs-config' of github.com:kaskada-ai/kaskada into tsk-read-the-docs-config ([`f0736aa`](https://github.com/kaskada-ai/kaskada/commit/f0736aa19b181e70d7d77bae21b2c01e0e89eebd)) + +* fix typo ([`1d88f3d`](https://github.com/kaskada-ai/kaskada/commit/1d88f3dc7b3f2543c95ddeea43947269a466e126)) + +* Merge branch 'main' into tsk-read-the-docs-config
([`2cd5675`](https://github.com/kaskada-ai/kaskada/commit/2cd5675e77e9b15da9269b689bd0053c3ac25c35)) + +* format ([`81897bf`](https://github.com/kaskada-ai/kaskada/commit/81897bf64c5d57d8628609eea47e8ecde9f08851)) + +* Merge pull request #238 from kaskada-ai/rm/docs-trial2c + +Use the correct path syntax and highlight with bash ([`1c4a4e6`](https://github.com/kaskada-ai/kaskada/commit/1c4a4e6d596f7f8abcd44b255c0cdbbb8199e051)) + +* make mypy happy ([`792f7c2`](https://github.com/kaskada-ai/kaskada/commit/792f7c23246f34b5f83eeafc907eb3b083b40aea)) + +* make mypy happy ([`8db2a75`](https://github.com/kaskada-ai/kaskada/commit/8db2a75515c65dfb56e731b513a863faaeb8c822)) + +* Formatting ([`12cefd0`](https://github.com/kaskada-ai/kaskada/commit/12cefd03dd66ac25714d653e8eeca470f0403c73)) + +* Use the correct path syntax and highlight with bash + +Bash has prettier formatting than shell. ([`2a27a9f`](https://github.com/kaskada-ai/kaskada/commit/2a27a9f21244a384f27769d3c4981c1043a39373)) + +* formatting ([`f958aa8`](https://github.com/kaskada-ai/kaskada/commit/f958aa82fcdaff0c411cd96ca9f531eaf137cc74)) + +* (docs): config for readthedocs + deps for building docs ([`a0d21d4`](https://github.com/kaskada-ai/kaskada/commit/a0d21d48bfd884aea484150dbf114721559f7fcc)) + +* Merge pull request #234 from kaskada-ai/rm/docs-trial2 + +Updates from second trial ([`21a3384`](https://github.com/kaskada-ai/kaskada/commit/21a338426eda4e620dd9673d89a734a687224a90)) + +* Updates from second trial + +* Explain how to know if you need pip or pip3 in the “tip” +* Remove redundant title in table loading warning. +* Change titles involved with loading to make it clear that you’re loading into a table +* Output the log location when starting a subprocess in Python +* Fix method names “load_file -> load” +* Improve “client must be provided” error to suggest creating a session +* Add comments to arguments in all examples clarifying the argument’s meaning +* Reduce use of Pandas where possible +* Remove use of “search” parameter in table list call (or move to a subsection related to searching) ([`bc298bb`](https://github.com/kaskada-ai/kaskada/commit/bc298bbcb3b2d03ec76043f8c3429bcfbd1c7461)) + +* Merge pull request #221 from kaskada-ai/tsk-python-client-docs + +(docs): adds python client generated docs ([`9e9ef1d`](https://github.com/kaskada-ai/kaskada/commit/9e9ef1d1144c0572b317378c6c829c72dfbe9586)) + +* Merge branch 'tsk-python-client-docs' of github.com:kaskada-ai/kaskada into tsk-python-client-docs ([`be9fe83`](https://github.com/kaskada-ai/kaskada/commit/be9fe830b04c4e64a5808f70a0635dc78d980be4)) + +* populates project metadata for Sphinx from pyproject.toml ([`1d83957`](https://github.com/kaskada-ai/kaskada/commit/1d83957f575ecb07e5047f2433b8b0992af12550)) + +* remove commented out statements in conf.py ([`f949952`](https://github.com/kaskada-ai/kaskada/commit/f949952051a3e565049cd8fb687517ebfcd878c4)) + +* Merge branch 'main' into tsk-python-client-docs ([`04dbbbd`](https://github.com/kaskada-ai/kaskada/commit/04dbbbdb88701bd3e0b8530c5a43cc5053ccad2a)) + +* Merge pull request #230 from kaskada-ai/rm/docs-trial1 + +Improvements from the first trial results ([`c7981b1`](https://github.com/kaskada-ai/kaskada/commit/c7981b1cd2d8ccfa8d7b0e02a0d789bc9c67e5f6)) + +* Merge branch 'main' into rm/docs-trial1 ([`f804f81`](https://github.com/kaskada-ai/kaskada/commit/f804f818ea766f95c1813f09895001bdc3f389d5)) + +* Improvements from the first trial results + +* Document the file types we accept in the data loading section +* Add 
a “tip” clarifying that Python and CLI are separate interfaces, and installing from pip doesn't give you the CLI +* See if we can make it clearer that you have to create a table before you load data into it. +* Use “kaskada-cli” consistently throughout docs +* For CLI quickstart run the processes before starting the CLI so it's clear you have to do that first. +* Add a help tip if CLI can't connect to endpoint - “do you need to start the kaskada services?” +* Add a tip that you may need to “allow services” the first time you run services. +* Document what each field is for in the example YAML file +* “Expected file format” and “entity” links are broken in CLI quickstart +* Call out that subsort is optional +* Make hello world queries more incremental, document all the command flags being used. +* Explain “pipelining” earlier on +* Document “final” more prominently ([`f991368`](https://github.com/kaskada-ai/kaskada/commit/f9913681577233c59348d28e9c08318092d09014)) + +* Merge branch 'main' into tsk-python-client-docs ([`b14e779`](https://github.com/kaskada-ai/kaskada/commit/b14e779bd1c4f0cbb6d2d9f9526b14a360f968e4)) + +* Merge pull request #226 from qzg/qzg-docs-patch1 + +Update labels to match visualization and sample code ([`9ed9050`](https://github.com/kaskada-ai/kaskada/commit/9ed9050bde5086760aadb05f815d5a875c2dfc83)) + +* Merge branch 'main' into tsk-python-client-docs ([`b2e4433`](https://github.com/kaskada-ai/kaskada/commit/b2e44333107623d9a0170cdc404a7a4856af052e)) + +* Merge pull request #224 from kaskada-ai/tsk-python-client-release + +(ci): adds python client release and publish to pypi via github workflow ([`397ebb4`](https://github.com/kaskada-ai/kaskada/commit/397ebb411909367087311f613419518fa6df94bc)) + +* Merge branch 'main' into tsk-python-client-docs ([`c5c7e54`](https://github.com/kaskada-ai/kaskada/commit/c5c7e549d0a142c99c42504727b2ca1d6db66920)) + +* Merge branch 'main' into tsk-python-client-release ([`980fc6c`](https://github.com/kaskada-ai/kaskada/commit/980fc6c0246e640842f7c7bf30a5480edf703df1)) + +* update tableSource to source in example yamls ([`a1f9c86`](https://github.com/kaskada-ai/kaskada/commit/a1f9c86c8690926db4147648f81cb0f153a6c9d6)) + +* update tableSource to source in example yaml ([`cdd9227`](https://github.com/kaskada-ai/kaskada/commit/cdd922714b47fe225950dbde7ffe2fe3db5f550b)) + +* Fix small typo ([`a5ac3a6`](https://github.com/kaskada-ai/kaskada/commit/a5ac3a67e0a47dc8b0ccac2b48eef32b26bd4fbc)) + +* Update labels to match visualization and sample code ([`4731b73`](https://github.com/kaskada-ai/kaskada/commit/4731b7386e2b4ef05a1956e47fd5ce1bdc838039)) + +* Merge pull request #225 from kaskada-ai/therapon-patch-1 + +Fix typo in faq.adoc ([`8ac449c`](https://github.com/kaskada-ai/kaskada/commit/8ac449c0cb0eef0757ac58391b59fd03ba6fbe8c)) + +* Fix typo in faq.adoc ([`90eaa6e`](https://github.com/kaskada-ai/kaskada/commit/90eaa6e44841c1458f7bc5c756864046ed0c351a)) + +* Merge branch 'main' into tsk-python-client-docs ([`27ad401`](https://github.com/kaskada-ai/kaskada/commit/27ad4013a2b4009724255f83626ff5246d1ba2da)) + +* Merge remote-tracking branch 'origin/main' into tsk-python-client-release ([`d7f5762`](https://github.com/kaskada-ai/kaskada/commit/d7f57629b804dbed0d66ea684d39918aecf86823)) + +* typo in release docs ([`6251c46`](https://github.com/kaskada-ai/kaskada/commit/6251c468d9167373b064a6f12ff2a4fe41cc3d14)) + +* remove extra command ([`7da2614`](https://github.com/kaskada-ai/kaskada/commit/7da2614e95f4d5a5f31cb965287e15cbb497538d)) + +* updated
GitHub release with artifacts ([`964914b`](https://github.com/kaskada-ai/kaskada/commit/964914b016c3959770789f8175505f638351dca4)) + +* specify custom distribution directory ([`2578ed8`](https://github.com/kaskada-ai/kaskada/commit/2578ed8a90867c87635aca4be8115fd84ca24218)) + +* force python version to be 3.7 ([`a264b13`](https://github.com/kaskada-ai/kaskada/commit/a264b133fac13b0d23dff868de191f485fc11d98)) + +* fix paths during verify ([`6623e0e`](https://github.com/kaskada-ai/kaskada/commit/6623e0ed4228cbe1be098c37cfd2bfcd91127a5b)) + +* uses github action to publish to PyPI ([`06b637d`](https://github.com/kaskada-ai/kaskada/commit/06b637d0e0586394fd8b6ed11290e86c79fd2761)) + +* uses github action to publish to PyPI ([`8611404`](https://github.com/kaskada-ai/kaskada/commit/8611404f0bd18409edb3b4f3adb32f4c478ac46b)) + +* moves validation to a separate job ([`f60382b`](https://github.com/kaskada-ai/kaskada/commit/f60382bbbe767b38955de3e362d2a81f0992e568)) + +* fix removal of requirements.txt ([`a04314e`](https://github.com/kaskada-ai/kaskada/commit/a04314ec62e16ec2cddfe8c5c3dbde1c215e303b)) + +* fix error in script ([`50f5008`](https://github.com/kaskada-ai/kaskada/commit/50f50089b04dd56cdecf17652dd2afe44174cdd1)) + +* (ci): adds proto generation to python release flow ([`3bdb6dd`](https://github.com/kaskada-ai/kaskada/commit/3bdb6dd7a501381103db3b26bf5de59b7413b746)) + +* (ci): adds empty workflow for python client to allow testing on branch ([`8367e40`](https://github.com/kaskada-ai/kaskada/commit/8367e40274d430f908945ce9905243cf2e2896a4)) + +* (ci): adds workflow and validation test ([`8e3d35b`](https://github.com/kaskada-ai/kaskada/commit/8e3d35bcc7b152068563cde273f57b48cad5d303)) + +* Python client release 0.1.4 ([`c7ae535`](https://github.com/kaskada-ai/kaskada/commit/c7ae5352543d6320a399a9b5106d0004827d82f5)) + +* Merge pull request #222 from kaskada-ai/tsk-release-0.6.0-1.3 + +release engine 0.6.0, python client 1.3 ([`d986243`](https://github.com/kaskada-ai/kaskada/commit/d986243c20960608a19cd57b28c63ee7f0e0f119)) + +* update cargo.lock ([`3579c26`](https://github.com/kaskada-ai/kaskada/commit/3579c262d1b3726093febddbf7330cb7c5b0cb8d)) + +* release engine 0.6.0, python client 1.3 ([`16ef1c2`](https://github.com/kaskada-ai/kaskada/commit/16ef1c223c9b2fd5b2fa68fef8394fad061f6110)) + +* make mypy happy ([`25f8537`](https://github.com/kaskada-ai/kaskada/commit/25f8537a2f2bf13b4829840649e8a817c35ea744)) + +* (docs): adds python documentation generation ([`d483b08`](https://github.com/kaskada-ai/kaskada/commit/d483b08c7bc7148f998a80ef7401cb63da939cdd)) + +* (docs): initial setup for sphinx ([`dfead41`](https://github.com/kaskada-ai/kaskada/commit/dfead41d36723381e10e939876f2f46ce9bcee1a)) + +* Update docs link ([`1781392`](https://github.com/kaskada-ai/kaskada/commit/1781392f976839c1f403ce44dc3cb4ed12dd82f5)) + +* Merge pull request #216 from kaskada-ai/tsk-pr-cleanup + +(tests): cleanup from PR #214 ([`e85d05c`](https://github.com/kaskada-ai/kaskada/commit/e85d05c2b857d7205dc78b19865d3ac013523611)) + +* (tests): cleanup from PR #214 ([`ab725a4`](https://github.com/kaskada-ai/kaskada/commit/ab725a4bb2cc58b132d1819bee078b8564d1aa46)) + +* Merge pull request #214 from kaskada-ai/tsk-pulsar + +(tests): Enables integration tests including Pulsar ([`c89ef7f`](https://github.com/kaskada-ai/kaskada/commit/c89ef7fad6659aa05590537c492e432f3e71e7ee)) + +* Merge remote-tracking branch 'origin/main' into tsk-pulsar
([`7bc6518`](https://github.com/kaskada-ai/kaskada/commit/7bc651845306a566760550f6458cdf45fa8c48ea)) + +* Merge pull request #212 from kaskada-ai/therapon-docs-patch-1 + +(docs): Update hello-world-cli.adoc ([`f773467`](https://github.com/kaskada-ai/kaskada/commit/f773467a0344673749c9aebc423132b84f27edb3)) + +* (docs): Update hello-world-cli.adoc + +We need 2 slashes for the path argument. +Though more than 2 still work (Unix systems behave the same no matter the number of leading slashes on a full path), 2 is the requirement for our implementation of CLI (Wren) ([`8088a4e`](https://github.com/kaskada-ai/kaskada/commit/8088a4e2b40e8397196fac9a0f6bebdee5b5a507)) + +* Merge pull request #209 from kaskada-ai/cli/logging + +changed client-id log line to debug-level ([`c3bd5e3`](https://github.com/kaskada-ai/kaskada/commit/c3bd5e37e2a3c49aa4dc9333858277abc0fa00ec)) + +* Merge branch 'main' into cli/logging ([`fa3a097`](https://github.com/kaskada-ai/kaskada/commit/fa3a097b2f417f9be64478de3e8fa0d76979591d)) + +* typo in Makefile ([`ba6c09f`](https://github.com/kaskada-ai/kaskada/commit/ba6c09f5815ef4c13d7d58b3977d89c827529d94)) + +* Merge pull request #207 from kaskada-ai/therapon-docs-cli-full-path-to-file + +(docs): Update hello-world-cli.adoc ([`e30133d`](https://github.com/kaskada-ai/kaskada/commit/e30133d9da5982d13da40f52341179881b1dc7a2)) + +* clean up ([`1ce34fb`](https://github.com/kaskada-ai/kaskada/commit/1ce34fb5ea8ae5e923b905cd2e206f74348003ab)) + +* changed client-id log line to debug-level ([`684d334`](https://github.com/kaskada-ai/kaskada/commit/684d334e8deea267ea39178b21ae79fe07de5d9b)) + +* remove copy of integ test binary ([`f2c2d23`](https://github.com/kaskada-ai/kaskada/commit/f2c2d2302f32d8e3197c3ab3b3c094165cd2850b)) + +* Merge pull request #144 from kaskada-ai/feature/object-store + +feat: Add Object Store Crate to Engine ([`087aa16`](https://github.com/kaskada-ai/kaskada/commit/087aa16a29c6cbad8703ef574fd87dbeebef4e3f)) + +* clippy ([`7287b18`](https://github.com/kaskada-ai/kaskada/commit/7287b18c47420ebba3c70312b3f6d55feabcf229)) + +* cargo fmt ([`de7f7db`](https://github.com/kaskada-ai/kaskada/commit/de7f7db265d68de1c5fb5d4a20f84637112f6965)) + +* added back convert uri ([`644f031`](https://github.com/kaskada-ai/kaskada/commit/644f031a2c1f5c4ae7a90bf6d5d2a900a68d680e)) + +* Merge branch 'main' into feature/object-store ([`591fdec`](https://github.com/kaskada-ai/kaskada/commit/591fdec96176bfb8c4cb087f0dd2ebfef1e56d3d)) + +* add ci_engine.yml to do not skip list ([`97a26c6`](https://github.com/kaskada-ai/kaskada/commit/97a26c67c818dbf50b32ba5c9fb9fd43f56f06eb)) + +* final flow testing ([`e7927c1`](https://github.com/kaskada-ai/kaskada/commit/e7927c1d0a67b2cfdb5dc86f8d3b4ed9e509b9fb)) + +* fix args to chown command ([`1a274c2`](https://github.com/kaskada-ai/kaskada/commit/1a274c2ac8a6369310d474b73c974e2074d7e4de)) + +* set the ownership and permissions for db file ([`d3d23c1`](https://github.com/kaskada-ai/kaskada/commit/d3d23c167a85b8370d8059fb8b87d046d2b7522d)) + +* update permissions to data from within docker ([`3d69e1a`](https://github.com/kaskada-ai/kaskada/commit/3d69e1af11facc56e7fe4516affe4e6c34ba1204)) + +* update permissions to data from within docker ([`2d1888f`](https://github.com/kaskada-ai/kaskada/commit/2d1888fb4bd7793975df15583e9d9829decd2a19)) + +* fix yaml error ([`d81eb79`](https://github.com/kaskada-ai/kaskada/commit/d81eb79542d9e93a10cda2fcfe44f48c7cea17e0)) + +* fix yaml error
([`a55e04f`](https://github.com/kaskada-ai/kaskada/commit/a55e04f1f9989321ec45bfbb3da1a5fe9157fb17)) + +* add debugging info on data directory ([`faadd58`](https://github.com/kaskada-ai/kaskada/commit/faadd588c28e4d20637b6d53f239cd1b70379a13)) + +* wait up to 4 minutes for docker compose containers to be healthy ([`6b8c808`](https://github.com/kaskada-ai/kaskada/commit/6b8c808c4d07c52a27ea81bc122cf077459d2b6f)) + +* CI breaks if /data is not rw ([`2adbdcf`](https://github.com/kaskada-ai/kaskada/commit/2adbdcf6d551fc0ac8965a8400cf2ecacf3b683c)) + +* increase wait time ([`6f2f4ff`](https://github.com/kaskada-ai/kaskada/commit/6f2f4ff243748eb41c0e63e747affc4372ceb076)) + +* increase wait time ([`0002a45`](https://github.com/kaskada-ai/kaskada/commit/0002a4586c8f9809166fd939650a19d045ccbdf1)) + +* add name of container to grep ([`163856f`](https://github.com/kaskada-ai/kaskada/commit/163856fde64addb97f2967e7457d1f53066ac98a)) + +* (ci): testing docker compose setup ([`f7d49e8`](https://github.com/kaskada-ai/kaskada/commit/f7d49e81cc91bc2582e313e78cad2d4bd609a761)) + +* (docs): Update hello-world-cli.adoc + +Adds the full path to the downloaded parquet file when calling the cli (#202) ([`9955475`](https://github.com/kaskada-ai/kaskada/commit/9955475a6248f46a74d083749f40fa3884b98caf)) + +* Merge pull request #121 from kaskada-ai/pulsar + +Pulsar ([`79b2d94`](https://github.com/kaskada-ai/kaskada/commit/79b2d94eefdd0337218473a67bd9bd319ad03abe)) + +* Merge branch 'feature/object-store' of github.com:kaskada-ai/kaskada into feature/object-store ([`9b7f94c`](https://github.com/kaskada-ai/kaskada/commit/9b7f94c133a746dd2544eb860caa41c5ad70df71)) + +* fixed test ([`692c0d9`](https://github.com/kaskada-ai/kaskada/commit/692c0d9e5831257cb04db21b5f3897682718efd4)) + +* Fix build/clippy ([`da44646`](https://github.com/kaskada-ai/kaskada/commit/da44646499ec9005afd79e2b8dcd9b76e00c6640)) + +* Merge branch 'main' into feature/object-store ([`143b6e7`](https://github.com/kaskada-ai/kaskada/commit/143b6e7812f19775244ab366d7ce4713572a804a)) + +* clippy ([`ca5bb6e`](https://github.com/kaskada-ai/kaskada/commit/ca5bb6e518180e54eadd65f9c072d0ea48a5bf14)) + +* Merge pull request #203 from kaskada-ai/rm/docs-example-links + +Add an "examples" section to the docs with links to our notebooks ([`e647493`](https://github.com/kaskada-ai/kaskada/commit/e647493cec03d97aac5e6589be0663762434f10b)) + +* Merge pull request #204 from kaskada-ai/rm/docs-split-integrations + +Promote the subsections of "integrating with feature stores" to be top-level ([`161f3ff`](https://github.com/kaskada-ai/kaskada/commit/161f3ffea648666a4ef850eb29f7e9a5de598625)) + +* Promote the subsections of "integrating with feature stores" to be top-level + +This improves discoverability of the different systems we integrate +with. This change also changes some verbiage from talking about +"features" to talking about "query results". + +NOTE: This section of the docs needs more work - this PR is just +intended to cover the navigation changes. ([`cd31e33`](https://github.com/kaskada-ai/kaskada/commit/cd31e33a1e186f05cd8c8d74e49672464b68cd1c)) + +* Add an "examples" section to the docs with links to our notebooks + +This also makes some minor fixes to the notebooks to reflect the fact +that the project has been released publicly.
([`d1a451a`](https://github.com/kaskada-ai/kaskada/commit/d1a451a06675602c132359973808ba45b37c9d9a)) + +* skip pulsar integration tests for now ([`ca903a4`](https://github.com/kaskada-ai/kaskada/commit/ca903a467a259b9be35a3f544a708afe1b5d13a2)) + +* context switch -- incomplete code ([`75654a8`](https://github.com/kaskada-ai/kaskada/commit/75654a87b781d796722ede28958e14fd849b2166)) + +* remove csvs ([`bd5c50e`](https://github.com/kaskada-ai/kaskada/commit/bd5c50ee73d8380a0392c7fca9f9db7ae26a0faf)) + +* remove ml example ([`b72ee6d`](https://github.com/kaskada-ai/kaskada/commit/b72ee6db74793ab365303984106f3c8f3d990c05)) + +* Merge branch 'main' into feature/object-store ([`f8ee648`](https://github.com/kaskada-ai/kaskada/commit/f8ee648be6f177df3d4e0039f50a2462d26708fc)) + +* add top level span to compute service calls ([`1084ce0`](https://github.com/kaskada-ai/kaskada/commit/1084ce00c240f9546eb04f362255da123c321dfb)) + +* fix entrypoint command ([`b5e6d64`](https://github.com/kaskada-ai/kaskada/commit/b5e6d6446826edf007bd8ac40524df1dbfc88253)) + +* use docker buildkit ([`ad4dbb0`](https://github.com/kaskada-ai/kaskada/commit/ad4dbb01a876f463b56968cf2860b9c1b688785f)) + +* use docker buildx ([`b5acd8a`](https://github.com/kaskada-ai/kaskada/commit/b5acd8af805507dd6dfbbad66139452e7537e78c)) + +* Use in_current_span over instrument ([`aa6b843`](https://github.com/kaskada-ai/kaskada/commit/aa6b8437b60dabb55b379ed48c3ce8acc8932437)) + +* use correct docker-compose file ([`0f70cca`](https://github.com/kaskada-ai/kaskada/commit/0f70cca83accb0d01860ce73d9d245873fd850fb)) + +* fix paths ([`bffed47`](https://github.com/kaskada-ai/kaskada/commit/bffed47c50e904ea9ab0a1eb6d31a2e4d5525c84)) + +* revert docker image to bullseye-slim ([`a769175`](https://github.com/kaskada-ai/kaskada/commit/a769175669c0443ba00665c1588e0f05d5e79c5d)) + +* (ci): testing docker compose setup ([`1c1bf48`](https://github.com/kaskada-ai/kaskada/commit/1c1bf4818172594ae053fb1398908fc9cb72dcab)) + +* cargo fmt ([`7068048`](https://github.com/kaskada-ai/kaskada/commit/7068048aaaae6e9749f1a078bcf00d2b7ac5d130)) + +* debugging integ test ([`51a41ef`](https://github.com/kaskada-ai/kaskada/commit/51a41efda3bc94c498649108feb6dbaf523d7971)) + +* Merge pull request #200 from kaskada-ai/tsk-docs-cli-fix-links + +(docs): fix install command for downloading binaries on Linux and OSX ([`e5ebfa8`](https://github.com/kaskada-ai/kaskada/commit/e5ebfa86f7ea3a88593559b69b3f7341c8d9b067)) + +* Merge branch 'main' into feature/object-store ([`bee252c`](https://github.com/kaskada-ai/kaskada/commit/bee252c6218b731dc178c0b625c8fc6e38e485dc)) + +* (docs): fix install command for downloading binaries on Linux and OSX ([`a275f7e`](https://github.com/kaskada-ai/kaskada/commit/a275f7e548ee4e5be3a1ee5e2702fa376769e023)) + +* (docs): fix install command for downloading binaries on Linux and OSX ([`13ccd56`](https://github.com/kaskada-ai/kaskada/commit/13ccd5621d798eb4d04d0b8c8084f7ada50baa5a)) + +* remove logging ([`10f30a9`](https://github.com/kaskada-ai/kaskada/commit/10f30a90cf3367acce53ddbc0b8b67cd9b588a12)) + +* Attempt to fix prepare span tracing ([`6a8c4d1`](https://github.com/kaskada-ai/kaskada/commit/6a8c4d192cf6ef1d14b71087dc052aafe7030b88)) + +* Merge pull request #198 from kaskada-ai/tsk-docs-cli-fix-links + +(docs): fix release links ([`9d01b06`](https://github.com/kaskada-ai/kaskada/commit/9d01b06d3acd8a1904b432c88d2375133fba0b0d)) + +* (docs): fix release links 
([`8af2832`](https://github.com/kaskada-ai/kaskada/commit/8af2832b8bc2adeb73233719919509e35dc6bae0)) + +* Merge pull request #123 from kaskada-ai/helm/canary + +created initial canary helm chart ([`05e9eac`](https://github.com/kaskada-ai/kaskada/commit/05e9eacb11094ed901bd4341b594ec4f1c3766f8)) + +* update ci ([`05ebfea`](https://github.com/kaskada-ai/kaskada/commit/05ebfeaba6e317c619ed134a085fad3bde95a9ab)) + +* Remove lingering uses of cert chain in python ([`6f962a5`](https://github.com/kaskada-ai/kaskada/commit/6f962a5c65b8bc74534285a2cbd9eeb077018850)) + +* remove allowed security advisories ([`428ae2f`](https://github.com/kaskada-ai/kaskada/commit/428ae2f18097b859955aa9b8e09f7c04e4fe6cc5)) + +* Merge pull request #197 from kaskada-ai/remove-cert-and-auth + +remove cert chain and redact auth from log ([`d17b016`](https://github.com/kaskada-ai/kaskada/commit/d17b0166527ac134961a9f4e663865ced55e427d)) + +* remove cert chain and redact auth from log ([`1ea69cf`](https://github.com/kaskada-ai/kaskada/commit/1ea69cf167ec57bad98d05a9435f41319f0d9bb1)) + +* merge async_stream_reader ([`d6b813b`](https://github.com/kaskada-ai/kaskada/commit/d6b813b4f9e9745b5d34694238ed86fff8293f99)) + +* Merge pull request #196 from kaskada-ai/tsk-fix-install-instructions + +(docs): update cli install instructions ([`ae4ea92`](https://github.com/kaskada-ai/kaskada/commit/ae4ea92fbc1dc19cab5b815723db364aa5639726)) + +* rewrite prepare_file to use filter_map and unzip instead of manually pushing into arc-mutex-vec ([`8d58743`](https://github.com/kaskada-ai/kaskada/commit/8d58743cdf0d93df959d4c1fa1638c8ca505fa90)) + +* (docs): update cli install instructions ([`6e25348`](https://github.com/kaskada-ai/kaskada/commit/6e253489fe4fe1f7298d10d5366abcc3e3689c26)) + +* readme updates ([`adc0f80`](https://github.com/kaskada-ai/kaskada/commit/adc0f80184aad953fe4e1ce6af99406ba34f6576)) + +* added first snapshot unittest ([`390a821`](https://github.com/kaskada-ai/kaskada/commit/390a821fed4973bf795642a00b41aa21229c757f)) + +* added linting and validation ([`24e4946`](https://github.com/kaskada-ai/kaskada/commit/24e4946fb9c959e7198f588ae5e1ed6222a78d76)) + +* added storage and secrets ([`cc1879c`](https://github.com/kaskada-ai/kaskada/commit/cc1879cc49b5f01fbe17f5516f9e633768c77bce)) + +* example with try unfold ([`5f9a96b`](https://github.com/kaskada-ai/kaskada/commit/5f9a96b28408e18e3a87171a1ec8799f141aa524)) + +* example: use async stream directly ([`eb4a675`](https://github.com/kaskada-ai/kaskada/commit/eb4a675f1854f86beafb70648809f6ab18676c43)) + +* implement Stream for PulsarReader using async_stream::stream! 
([`4790f1c`](https://github.com/kaskada-ai/kaskada/commit/4790f1c3443546190635887f001c2a01236c507b)) + +* Merge pull request #161 from kaskada-ai/docs/notebook-demo-narrative + +Demo notebook with a narrative ([`3061bb1`](https://github.com/kaskada-ai/kaskada/commit/3061bb113cc421efe434d0d10003214ae4542504)) + +* Merge pull request #193 from kaskada-ai/tsk-link-fix + +(docs) fix broken link ([`e75184e`](https://github.com/kaskada-ai/kaskada/commit/e75184ecdf19182ee9c512d61a44e924921a6447)) + +* fix broken link ([`c957809`](https://github.com/kaskada-ai/kaskada/commit/c95780923a1eebeedfefa5d7b886e0d1708115ef)) + +* Merge pull request #191 from kaskada-ai/tsk-hellow-world + +(docs): Updates paragraph around hello world links ([`0169037`](https://github.com/kaskada-ai/kaskada/commit/01690379655589c76fef1de71e5367af585a6c31)) + +* Merge branch 'main' into tsk-hellow-world ([`17adcc1`](https://github.com/kaskada-ai/kaskada/commit/17adcc1d82c0e92795abf3e48c581396f977836d)) + +* adds links to what-is-kaskada page ([`3ec0182`](https://github.com/kaskada-ai/kaskada/commit/3ec0182144c0ebe914978e8334e33a65cf066330)) + +* created initial canary helm chart ([`fcb2b87`](https://github.com/kaskada-ai/kaskada/commit/fcb2b873cf8d91fa00582c059747c65c7ad89bd4)) + +* Merge pull request #192 from kaskada-ai/puslar/sensitive + +marked the pulsar auth_params field as sensitive ([`a5b250e`](https://github.com/kaskada-ai/kaskada/commit/a5b250e261ca33c4914da6133a4b750e0fee6845)) + +* marked the pulsar auth_params field as sensitive ([`ce5be18`](https://github.com/kaskada-ai/kaskada/commit/ce5be1804eaf5c35277c6cef2ae5555d69b47ea5)) + +* clippy ([`1de3e30`](https://github.com/kaskada-ai/kaskada/commit/1de3e30ec25771ac0b737cd1b81f9a36e3889658)) + +* Merge pull request #184 from kaskada-ai/add-labels-to-template + +Update issue templates ([`77302dc`](https://github.com/kaskada-ai/kaskada/commit/77302dc3e7cbe00a8fc2d5afc3288531af8d3720)) + +* Removing cell for private auth, fixing some typos. 
([`55b477f`](https://github.com/kaskada-ai/kaskada/commit/55b477fca03f227f3a51503f28fa7cdd3e28eaa1)) + +* code review comments/mpl2.0 ([`c95c74c`](https://github.com/kaskada-ai/kaskada/commit/c95c74c790fa78cd038fab46a7377c37737c24ef)) + +* cargo fmt ([`faad7b0`](https://github.com/kaskada-ai/kaskada/commit/faad7b0d7dc9dc2c6fff2e366b495d5f3d8ddade)) + +* more clippy ([`b231901`](https://github.com/kaskada-ai/kaskada/commit/b2319015a204b03ff36571c2543187cf5ed1ed30)) + +* more todo fixes ([`617f4b6`](https://github.com/kaskada-ai/kaskada/commit/617f4b61a7bdbe365aaa61c4402230dcc20ab7e2)) + +* unwrap fixes ([`422cc42`](https://github.com/kaskada-ai/kaskada/commit/422cc42be759fb96b5e6ad7428d6645854779988)) + +* make proto lint happy ([`3fc7188`](https://github.com/kaskada-ai/kaskada/commit/3fc7188bbd543afccb0b617a364def2aaf4f47c6)) + +* make proto lint happy ([`ebd4ca9`](https://github.com/kaskada-ai/kaskada/commit/ebd4ca9fff89ec7979200c3b1742f211560a0725)) + +* minor tweaks ([`807bdfa`](https://github.com/kaskada-ai/kaskada/commit/807bdfafa011af0a75dd27a9b5cf25acf45cc76b)) + +* minor tweaks ([`f627f34`](https://github.com/kaskada-ai/kaskada/commit/f627f34f3d83b4411a99108fd5c85e6c50d1e8cd)) + +* minor tweaks ([`8c77733`](https://github.com/kaskada-ai/kaskada/commit/8c777332bdb8a450009cef311edbe7a23eae60c3)) + +* updated to use local file ([`05cac0e`](https://github.com/kaskada-ai/kaskada/commit/05cac0e5fdc500733c9cbf0b56278d2868fe5c60)) + +* minor tweaks ([`6254396`](https://github.com/kaskada-ai/kaskada/commit/62543962b8ce8552aa31339e983e943c0432e4a2)) + +* (docs): Updates paragraph around hello world links ([`148d424`](https://github.com/kaskada-ai/kaskada/commit/148d424add0540dbbf4b816071aaf30621fb0b82)) + +* merge from main ([`a4c50b7`](https://github.com/kaskada-ai/kaskada/commit/a4c50b7d93e6e0fcd09628bf611eb771a4d280d0)) + +* Merge branch 'main' into feature/object-store ([`5a1c861`](https://github.com/kaskada-ai/kaskada/commit/5a1c8612a7a7c00997cbada63e62619c8d207222)) + +* clean up Stream impl for PrepareIter ([`ab68f5c`](https://github.com/kaskada-ai/kaskada/commit/ab68f5ca1c1302e3b5a74d3ddfbccc78fef5a298)) + +* simplify PulsarReader::poll_next ([`8c7be5d`](https://github.com/kaskada-ai/kaskada/commit/8c7be5d0abc07e9d313b96212ce13c274b67986b)) + +* fixing artifact name ([`39f8043`](https://github.com/kaskada-ai/kaskada/commit/39f8043ff27983cce5bf824e76102eeb40dc221b)) + +* Merge pull request #189 from kaskada-ai/tsk-revert-ubuntu22 + +fix artifact names ([`156a8e6`](https://github.com/kaskada-ai/kaskada/commit/156a8e6bef11f8a3b5cfccd8e0192e142bbb17ed)) + +* fix artifact names ([`a4bbd7f`](https://github.com/kaskada-ai/kaskada/commit/a4bbd7f2b4b5df89ac6241dcde693d2ba7d74201)) + +* Merge pull request #186 from kaskada-ai/tsk-revert-ubuntu22 + +Reverts builds on ubuntu 22.04 ([`a535f9d`](https://github.com/kaskada-ai/kaskada/commit/a535f9d1fdd38a04036537db3d9d89341aab04b7)) + +* Reverts builds on ubuntu 22.04 ([`138551d`](https://github.com/kaskada-ai/kaskada/commit/138551d1931f12bc1a416a65a199747dc09caf37)) + +* Merge pull request #185 from kaskada-ai/tsk-release-fix + +fix path for cli binary ([`084cc19`](https://github.com/kaskada-ai/kaskada/commit/084cc19c30454a54b732a6f7404d55bc98706f93)) + +* fix path for cli binary ([`c6a903a`](https://github.com/kaskada-ai/kaskada/commit/c6a903a595833efc7c3910c9c2b1dc4cb71758ec)) + +* fix path for cli binary ([`10831cc`](https://github.com/kaskada-ai/kaskada/commit/10831cc30d36411b08c7839b99b5ff9d79ac4d78)) + +* Merge branch 'pulsar' of 
github.com:kaskada-ai/kaskada into pulsar ([`b278cdd`](https://github.com/kaskada-ai/kaskada/commit/b278cdd6f72d16aed3e9f573c09650a8155309c6)) + +* remove block_on from pulsar client, making PrepareIter implement Stream instead of Iter ([`46f3691`](https://github.com/kaskada-ai/kaskada/commit/46f3691517d38b3314261b621915ead8d01fb62d)) + +* code review comments/clippy ([`5185630`](https://github.com/kaskada-ai/kaskada/commit/5185630f2b4e67fc5f7058e9accf7cb192000045)) + +* code review comments/context ([`3325acb`](https://github.com/kaskada-ai/kaskada/commit/3325acbdb6ea44a75b957049caa43932e222eb3b)) + +* code review comments/attach printable lazy ([`78f5d9f`](https://github.com/kaskada-ai/kaskada/commit/78f5d9f627ce6e2337b7a1eab700853d7550112b)) + +* code review comments/update cargo workspace ([`9a0836d`](https://github.com/kaskada-ai/kaskada/commit/9a0836d1998c5745da56b7b83ea72fc19a900d97)) + +* Merge pull request #182 from kaskada-ai/pulsar-fix-temp-file-deletion + +fix: fix temp file deletion ([`0f6e0bd`](https://github.com/kaskada-ai/kaskada/commit/0f6e0bd366e9ee09396d77435706139d6a57e36a)) + +* Merge branch 'main' into feature/object-store ([`ef2eea2`](https://github.com/kaskada-ai/kaskada/commit/ef2eea2a150c3dc95a3b425db241e6cb4962c524)) + +* added implementation more ([`ed54c47`](https://github.com/kaskada-ai/kaskada/commit/ed54c476b3eb9238d3e1c1838deb56cbcf10fece)) + +* Update feature-report.md ([`3c4de7d`](https://github.com/kaskada-ai/kaskada/commit/3c4de7d062021ac0141e075c5498cd236a536d1d)) + +* Update bug_report.md ([`ce56447`](https://github.com/kaskada-ai/kaskada/commit/ce5644716c90e402e65e3ed3d37dc731e87e38fe)) + +* Merge pull request #183 from kaskada-ai/tsk-ci-release-libssl3 + +(ci): adds ubuntu-22.04 to the release matrix ([`376165b`](https://github.com/kaskada-ai/kaskada/commit/376165b9f5d818dee433690501538c6161a2eb26)) + +* (ci): adds ubuntu-22.04 to the release matrix ([`d598a91`](https://github.com/kaskada-ai/kaskada/commit/d598a91616700c6e8ca2b1cf9896387224994490)) + +* Explicitly close the temp file ([`474e50a`](https://github.com/kaskada-ai/kaskada/commit/474e50abcde9a69a2b8295b6f84f3ec550170454)) + +* Merge pull request #179 from kaskada-ai/updates + +build: update dependencies ([`d796242`](https://github.com/kaskada-ai/kaskada/commit/d796242771cfeed31e094b501cf8944d8aa63c76)) + +* Merge pull request #177 from kaskada-ai/more-readme-changes + +add docs to readme ([`c475727`](https://github.com/kaskada-ai/kaskada/commit/c4757277480d1af7d834c4cca7d6b750ca812f7d)) + +* add docs to readme ([`dd9fc45`](https://github.com/kaskada-ai/kaskada/commit/dd9fc4521be11386fa0e8722fc6e72838e3a5aed)) + +* Merge pull request #176 from kaskada-ai/pyton/astra-streaming + +updated python client for new pulsar protos ([`3507566`](https://github.com/kaskada-ai/kaskada/commit/35075663888b5bf14eb8ea1cc4bd95a041b125cb)) + +* ran format ([`28717d9`](https://github.com/kaskada-ai/kaskada/commit/28717d94aaf321eba113419a5ae11ba88daf5bab)) + +* updated python client for new pulsar protos ([`490b465`](https://github.com/kaskada-ai/kaskada/commit/490b46544680a3cd8a59394470acb504ff11d64b)) + +* Merge pull request #171 from kaskada-ai/tsk-release-cli + +(ci): build and release kaskada CLI ([`1ceb674`](https://github.com/kaskada-ai/kaskada/commit/1ceb674df77a507b1bd9d78da98a16e39de5aae2)) + +* Merge pull request #175 from kaskada-ai/feature/release-pypi-0.1.2 + +PyPi Release 0.1.2 ([`9053e5a`](https://github.com/kaskada-ai/kaskada/commit/9053e5a69672b98b0201ab4d39ecba6890d3f72e)) + +* 0.1.2 with 
email ([`09f7899`](https://github.com/kaskada-ai/kaskada/commit/09f7899f02d1f82525f62922cbc028bc4fa9d4e4)) + +* Merge branch 'main' into tsk-release-cli ([`6edd69c`](https://github.com/kaskada-ai/kaskada/commit/6edd69c0d251a755261dbcd267774aef674a1938)) + +* Merge pull request #170 from kaskada-ai/tsk-ci-fixes + +(ci): fixes name path and git safe directory ([`a5da7fe`](https://github.com/kaskada-ai/kaskada/commit/a5da7fe7611841029906a7a44c513c79f42e85e4)) + +* Merge branch 'main' into tsk-release-cli ([`d7ed44c`](https://github.com/kaskada-ai/kaskada/commit/d7ed44c29b52718cc37f33d2af54162baa995190)) + +* Merge branch 'main' into tsk-ci-fixes ([`2874d6c`](https://github.com/kaskada-ai/kaskada/commit/2874d6cf5ccf7413a43c274ad61fc56eed876dfb)) + +* Merge pull request #174 from kaskada-ai/getting-started-docs + +fix nav for getting started ([`94af2d5`](https://github.com/kaskada-ai/kaskada/commit/94af2d50ea128a222a9225b0741999446a3a1a06)) + +* fix nav for getting started ([`0b27eb9`](https://github.com/kaskada-ai/kaskada/commit/0b27eb914b2ed66696fc29d1dee3878c1a4c502d)) + +* Merge pull request #173 from kaskada-ai/getting-started-docs + +docs: add a getting started page ([`ec90e0f`](https://github.com/kaskada-ai/kaskada/commit/ec90e0f8deec85dc1d8dc1d70446085d378dbf50)) + +* Merge branch 'main' into getting-started-docs ([`622d50f`](https://github.com/kaskada-ai/kaskada/commit/622d50fef5d4b218f8b7cfc8692d2b23fc0bbe20)) + +* Merge pull request #172 from kaskada-ai/fix-readme + +docs: update readme ([`8516bd1`](https://github.com/kaskada-ai/kaskada/commit/8516bd1a74e97e9cc51e5b4fc7235368b78ac5fe)) + +* add some bolding ([`158d5da`](https://github.com/kaskada-ai/kaskada/commit/158d5da092171546269ecd18f1f709f787e1f75e)) + +* (ci): build and release kaskada CLI ([`2c6f4e6`](https://github.com/kaskada-ai/kaskada/commit/2c6f4e6f532870840cc9d465e117732fa418749a)) + +* Merge branch 'main' into tsk-ci-fixes ([`2dc6292`](https://github.com/kaskada-ai/kaskada/commit/2dc62925e47c983c0420c4ec881b21d9ed0d75ac)) + +* expose get_pulsar_schema as async, which indirectly makes prepare_file and prepared_batches async ([`310354d`](https://github.com/kaskada-ai/kaskada/commit/310354d190a6964bf65bc38c0f25056c8a231734)) + +* (ci) fixes name path and git safe directory ([`14d5948`](https://github.com/kaskada-ai/kaskada/commit/14d5948d0fb655c4484b0c2ec9e7398fb822a9f9)) + +* Merge pull request #166 from kaskada-ai/ben/update-benchmark + +ref: update transaction benchmark instructions; examples ([`2ada1c6`](https://github.com/kaskada-ai/kaskada/commit/2ada1c672eac8d02636a584da328c4b96eb42dbb)) + +* Merge pull request #163 from kaskada-ai/update-jupyter-docker-image + +Update pulsar docker compose with new image ([`b99744c`](https://github.com/kaskada-ai/kaskada/commit/b99744cafd76fdd87cc41616c366b019c867cb32)) + +* Merge pull request #167 from kaskada-ai/tsk-ci-mulitplatform-docker-images + +(ci): adds multiplatform docker image for linux amd64 and arm64 ([`12a4e07`](https://github.com/kaskada-ai/kaskada/commit/12a4e07ba950f8a0e6b2a5118c33bf3b9ab18d1d)) + +* Merge pull request #162 from kaskada-ai/pulsar-materialization-integ-tests + +test: add basic pulsar materialization integration test ([`e5a08ce`](https://github.com/kaskada-ai/kaskada/commit/e5a08ce3adefec9b988a6b141f0de52eade3bde1)) + +* address comments ([`f18176f`](https://github.com/kaskada-ai/kaskada/commit/f18176fdecd56e905acfcfbec82b52157906ffa3)) + +* demo state 
([`02e1edd`](https://github.com/kaskada-ai/kaskada/commit/02e1edd3c30b581159e18a5b42a4c8cdf557d1b9)) + +* Merge pull request #168 from kaskada-ai/release/0.5.0 + +Release 0.5.0 ([`7be3ba9`](https://github.com/kaskada-ai/kaskada/commit/7be3ba99b95a2af9216c465a369b83ad53a6f9d6)) + +* Release 0.5.0 ([`4a9ba16`](https://github.com/kaskada-ai/kaskada/commit/4a9ba16491d31f5a64563dc83e0eca2fc3cdcd5a)) + +* cleanup ([`7dd47e5`](https://github.com/kaskada-ai/kaskada/commit/7dd47e50c336ae5135fa674bc2d4caf06f15abd0)) + +* final multiplatform docker image release ([`8523158`](https://github.com/kaskada-ai/kaskada/commit/8523158cb388c698da23a99f878495766e04bd04)) + +* update transaction benchmark instructions; examples ([`4ec6b93`](https://github.com/kaskada-ai/kaskada/commit/4ec6b93c272586956d2a71c8b22335d3e840bf3e)) + +* Pass temp files to method so they stay in scope ([`f24b309`](https://github.com/kaskada-ai/kaskada/commit/f24b309607cea6cbe02162e70b668e04097839fd)) + +* Update pulsar docker compose with new image ([`66356fc`](https://github.com/kaskada-ai/kaskada/commit/66356fce0bca99f3c382ba3d22e00ffb6585812c)) + +* Merge pull request #164 from kaskada-ai/tsk-notebooks-remove-github-token-code + +(examples): removes code for GitHub Authtoken ([`cdc0854`](https://github.com/kaskada-ai/kaskada/commit/cdc08545ccbecfc1d1fab0cd33f3321f8b060ecb)) + +* inline maybe_download_file to avoid the temp file getting deleted prematurely ([`892a904`](https://github.com/kaskada-ai/kaskada/commit/892a9048d6ef3adc5cb45f766e03627fc8f3ff0c)) + +* adds deny entries for ssl brought from pulsar client ([`27c5046`](https://github.com/kaskada-ai/kaskada/commit/27c50461ddf6dd337513ddfd57498cd8c5be678b)) + +* removed todo ([`06fe32a`](https://github.com/kaskada-ai/kaskada/commit/06fe32a2ad18e2a3db94d4468ecf1a01b8b17680)) + +* added more description ([`012d9c8`](https://github.com/kaskada-ai/kaskada/commit/012d9c880f7adb6e27af8b458b015ea9263d5e88)) + +* address comments ([`9ddc78a`](https://github.com/kaskada-ai/kaskada/commit/9ddc78a4a6f08f7f685cba4bada2e3a44fa70d44)) + +* address comments ([`b223f2e`](https://github.com/kaskada-ai/kaskada/commit/b223f2eabdd75af3b62b4a17815a97dfd485b295)) + +* code review comments/removed comment around unwrap ([`5f03736`](https://github.com/kaskada-ai/kaskada/commit/5f037361da0d72efbc7f230cf72b9dd9936e11d7)) + +* remove Untitled.ipynb file ([`91a6d03`](https://github.com/kaskada-ai/kaskada/commit/91a6d03c3e85c290b7fae75ee63a0ba5dce6a53c)) + +* (examples): removes code for GitHub Authtoken ([`0b5b045`](https://github.com/kaskada-ai/kaskada/commit/0b5b045468e2e5cef67cc2925369bdf941b9312f)) + +* context switch -- incomplete code ([`d0533d0`](https://github.com/kaskada-ai/kaskada/commit/d0533d0106a56ae0604fe2c7b3b86ca9c07551ce)) + +* context switch -- incomplete code ([`c97a67e`](https://github.com/kaskada-ai/kaskada/commit/c97a67eaa7781011cbf56b09e6d6f5560d603d5f)) + +* code review comments/registry comment ([`6aa8141`](https://github.com/kaskada-ai/kaskada/commit/6aa8141f5efd5d368fe4f3a2b6d945f59b5cc042)) + +* code review comments/s3a ([`c400fb9`](https://github.com/kaskada-ai/kaskada/commit/c400fb9e36666b3e5c0bad304d76791e189cec50)) + +* Add pulsar materialization integration test ([`e14b745`](https://github.com/kaskada-ai/kaskada/commit/e14b745e8f22c0cf9f2e0761ae4884807b1621c0)) + +* testing workflow ([`65fcb23`](https://github.com/kaskada-ai/kaskada/commit/65fcb23432152e1db6f9c25f3e0dd0784d12ca81)) + +* testing workflow 
([`ebc915e`](https://github.com/kaskada-ai/kaskada/commit/ebc915ea9cc520ad5f11847ecd5338f1ddf6994e)) + +* testing workflow ([`706b91f`](https://github.com/kaskada-ai/kaskada/commit/706b91f4048c2c7374e191c6cc3ecf56a2d7e9d5)) + +* testing workflow ([`bd9d81a`](https://github.com/kaskada-ai/kaskada/commit/bd9d81a2a827d3deffd0cbf09ae33cab203cfdd3)) + +* testing workflow ([`bb62e25`](https://github.com/kaskada-ai/kaskada/commit/bb62e25d66a349e2012e232fe131a8b82dac8f65)) + +* Removing some dev details better found elsewhere. ([`e6d8ce7`](https://github.com/kaskada-ai/kaskada/commit/e6d8ce7954aa019bf16b6df9a8ad0714d4ece8c1)) + +* Merge branch 'main' into docs/notebook-demo-narrative ([`b6707e9`](https://github.com/kaskada-ai/kaskada/commit/b6707e902858a91921ee754af985ab3f3d51a16e)) + +* testing install of ubuntu deps for multiplatform docker ([`957f259`](https://github.com/kaskada-ai/kaskada/commit/957f2597464b712607aa97407fe4325b6df2c230)) + +* testing install of ubuntu deps for multiplatform docker ([`487d0d3`](https://github.com/kaskada-ai/kaskada/commit/487d0d3203b4e23f2cd18a35c5ea84e6db98ca01)) + +* comments ([`d1c57e4`](https://github.com/kaskada-ai/kaskada/commit/d1c57e4e8a7669194d135bfcbd4e8393abe8e06a)) + +* cleanup ([`7d844e2`](https://github.com/kaskada-ai/kaskada/commit/7d844e27da77e77fe3477ffc3537776789fc6b35)) + +* move _publish_time into raw_metadata; switch back to using raw_metadata in PrepareIter. this is more correct, since as the decimal_ tests illustrate, the raw metadata corresponds to what is actually in the source we're reading from. + +this means we also need to preserve the raw user_schema for use by the Pulsar consumer. ([`9425cfd`](https://github.com/kaskada-ai/kaskada/commit/9425cfd6e3dc127e250050b3b60a2251bdb25662)) + +* Revert "update the decimal_fixed_len test files to be well-behaved sparrow inputs. (no timezone on the timestamp, and no decimal columns)" + +This reverts commit afd7f55ee6980c7b12d508c5a1e796b0a0679bd1. 
([`fb7b712`](https://github.com/kaskada-ai/kaskada/commit/fb7b712e2efa410769ff35d18bccadddd43e4178)) + +* rewrite comment explaining the consumer timeout problem ([`8927978`](https://github.com/kaskada-ai/kaskada/commit/8927978555ca47f96512a8e570ba976f787c325b)) + +* Merge branch 'main' into feature/object-store ([`0c52402`](https://github.com/kaskada-ai/kaskada/commit/0c52402ce6b6ba12df5a94feee047852678ce7ae)) + +* code review comments ([`644dfd0`](https://github.com/kaskada-ai/kaskada/commit/644dfd0d8828e74be7dac363449ca0ab422c39bc)) + +* removed s3 helper ([`a27a463`](https://github.com/kaskada-ai/kaskada/commit/a27a4639002ab7d2cae99b3ef60bddd73436ec49)) + +* updated todos ([`2f3a7f9`](https://github.com/kaskada-ai/kaskada/commit/2f3a7f9764f4a6042b43972423fefb9950343bdc)) + +* Merge pull request #150 from kaskada-ai/feature/gcp-gcs + +Feature: Add Google Cloud Storage Object Store Support ([`ff7916d`](https://github.com/kaskada-ai/kaskada/commit/ff7916d875813c10965c22df11e8c24909c92e13)) + +* fix unused imports ([`8a45390`](https://github.com/kaskada-ai/kaskada/commit/8a45390928d181be4b1e5b559c35bae9e2ecbdc4)) + +* pub mod is unnecessary since we are exposing with pub use ([`e452ffd`](https://github.com/kaskada-ai/kaskada/commit/e452ffdbd81c0b4afe5f4571f53ba79153704468)) + +* merge ([`df440ca`](https://github.com/kaskada-ai/kaskada/commit/df440ca58cab955b85ea3b6eefdd18d542c0927d)) + +* remove topic_url proto field ([`13d501b`](https://github.com/kaskada-ai/kaskada/commit/13d501bb451321eb1fbd5e97be37cdad3df929f7)) + +* fix hashing to not use topic_url ([`5a7b514`](https://github.com/kaskada-ai/kaskada/commit/5a7b5145b5c89d5602ac09cce3e52e8dc71d534f)) + +* Merge pull request #148 from kaskada-ai/puslar/clients + +updated clients for proto changes ([`e6c8463`](https://github.com/kaskada-ai/kaskada/commit/e6c84634c09d199f739d5d75981d1f9b48137924)) + +* Merge pull request #154 from kaskada-ai/pulsar-fix-clippy-from-proto-changes + +fix clippy from proto changes ([`19ac837`](https://github.com/kaskada-ai/kaskada/commit/19ac8376785b89a051f67d18878cd2da2da3c4d5)) + +* Merge pull request #153 from kaskada-ai/pulsar-allow-openssl-with-sec-vuln + +allow openssl with sec vuln ([`2c6c1d8`](https://github.com/kaskada-ai/kaskada/commit/2c6c1d803956e49bbde9ff7345927c7f82a5ed3a)) + +* Merge pull request #149 from kaskada-ai/tsk-cross-compile-tmp + +(ci) adds linux/aarch64 binaries to release workflow ([`25e4888`](https://github.com/kaskada-ai/kaskada/commit/25e488815a0fe397b4a2e368e006e681198c1534)) + +* Merge branch 'main' into feature/object-store ([`aba7cd8`](https://github.com/kaskada-ai/kaskada/commit/aba7cd8e9eba52a536cf8d71693eb5ed74c08bd6)) + +* allow dead code ([`3b003c9`](https://github.com/kaskada-ai/kaskada/commit/3b003c98bd2c2db018ff21d2efe4985ca6dd5724)) + +* Fix clippy warnings ([`e410368`](https://github.com/kaskada-ai/kaskada/commit/e4103680d5cd13d3e58cefe939b52af875e17de9)) + +* avoid putting pulsar auth info in the logs ([`adeead4`](https://github.com/kaskada-ai/kaskada/commit/adeead479bb2d78de65fbe4fa84cc4e1f6276fb9)) + +* include subscription and last_publish_time in get_prepare_hash ([`f756e9a`](https://github.com/kaskada-ai/kaskada/commit/f756e9a35ae02f1d471260f7ff7c078b21f85995)) + +* Allow openssl for rustsec-2023-0024 ([`a09809f`](https://github.com/kaskada-ai/kaskada/commit/a09809fdbf70759e35a984cae1164a4ebab1b0e5)) + +* Merge pull request #152 from kaskada-ai/puslar/fix_int_tests + +fixed integration tests 
([`fb31afe`](https://github.com/kaskada-ai/kaskada/commit/fb31afe3fb9309bd33bae48a3f0c3b5a09ab2e86)) + +* fixed integration tests ([`92d7224`](https://github.com/kaskada-ai/kaskada/commit/92d7224f7f65f65c5e322ef09bcad728546c023f)) + +* Merge pull request #151 from kaskada-ai/pulsar-fix-proto-imports-from-pulsar-message-updates + +fix proto imports from pulsar message updates ([`de29f3b`](https://github.com/kaskada-ai/kaskada/commit/de29f3ba8556c2f13151973f12904c50aadb45dd)) + +* test fixes ([`8ff055c`](https://github.com/kaskada-ai/kaskada/commit/8ff055cf24fd2e1afd83385329aeb06695805a1d)) + +* fix imports ([`463a8bd`](https://github.com/kaskada-ai/kaskada/commit/463a8bdc846aed8a673f2f702d4f1bee95744bd6)) + +* todos ([`ff3cd2b`](https://github.com/kaskada-ai/kaskada/commit/ff3cd2b6653d39a93d3b612b4e1cefbe93de416b)) + +* updated unwraps() ([`cbfce08`](https://github.com/kaskada-ai/kaskada/commit/cbfce0836fb1e611d445913bdf51caa9c5fc08f8)) + +* added object store builder gcp ([`5abf28f`](https://github.com/kaskada-ai/kaskada/commit/5abf28fa4d47ab8ef1eae64462cae2fa69c52e0d)) + +* added gcp gcs url support ([`478537c`](https://github.com/kaskada-ai/kaskada/commit/478537cc0c9d90518025080c210b92f3c84fa1e3)) + +* clippy ([`5ee5596`](https://github.com/kaskada-ai/kaskada/commit/5ee55965ce04139a8eff92b00cd298067e3a027f)) + +* Merge remote-tracking branch 'origin/main' into tsk-cross-compile-tmp ([`2a67e63`](https://github.com/kaskada-ai/kaskada/commit/2a67e63a4c201faf5a8a84588fcfc60ccbcd45d0)) + +* adds cross compile workflow to release workflow ([`c4a810d`](https://github.com/kaskada-ai/kaskada/commit/c4a810d142a097f586ccd0ba8ba40af6975aa63a)) + +* updated clients for proto changes ([`73ec723`](https://github.com/kaskada-ai/kaskada/commit/73ec723a3b4517ddc83bce38dc64ce4ee77f663d)) + +* use alternative GitHub action for protoc install ([`ad1b801`](https://github.com/kaskada-ai/kaskada/commit/ad1b8013685ba0983ad9f6022b7b9540ee2bcb4c)) + +* use alternative GitHub action for protoc install ([`85b0e6c`](https://github.com/kaskada-ai/kaskada/commit/85b0e6c16e3046c1735393c2a9f6f906801b34c4)) + +* use alternative GitHub action for protoc install ([`e9b1240`](https://github.com/kaskada-ai/kaskada/commit/e9b124019a7bc6378741291e8af1afab5f601b01)) + +* Merge pull request #147 from kaskada-ai/esp/wren_updates + +made proto updates for wren ([`1984a78`](https://github.com/kaskada-ai/kaskada/commit/1984a7830f651f24fee921b3bb5ecf2f0eaceffc)) + +* made proto updates for wren ([`a97a5d4`](https://github.com/kaskada-ai/kaskada/commit/a97a5d41e6c8811bb24a282a4aee8a071bcbf578)) + +* rename back since Github won't let me run this on a branch with a new name that is not in main as well ([`e582d03`](https://github.com/kaskada-ai/kaskada/commit/e582d037160aa88d1f53fccad40fdea97c3687d8)) + +* adds release upload actions ([`2194800`](https://github.com/kaskada-ai/kaskada/commit/2194800049a79d1c648d512c30f10efbde81f3b3)) + +* add pulsar.proto to build.rs ([`1a1f475`](https://github.com/kaskada-ai/kaskada/commit/1a1f47579a18abf3af2b84552060ae9e2681c97e)) + +* Merge pull request #146 from kaskada-ai/esp/proto_and_wren + +commonized some protos ([`ff6afce`](https://github.com/kaskada-ai/kaskada/commit/ff6afcee648ade9ee93e9de4abd60560a6b76ad4)) + +* rename ([`9f149f0`](https://github.com/kaskada-ai/kaskada/commit/9f149f0abf7164c7e0a17baad074c51530e971d9)) + +* moved pulsar stuff to its own file ([`ae6dee7`](https://github.com/kaskada-ai/kaskada/commit/ae6dee772ce584caa6bf52f9995f252295054ac8)) + +* added last_publish_time 
([`9113928`](https://github.com/kaskada-ai/kaskada/commit/9113928df7dea1d98ef8ef22029ca1c2534ef274)) + +* fmt ([`8b35f46`](https://github.com/kaskada-ai/kaskada/commit/8b35f46c929326c9daf6ea068f026ebc6419e4c5)) + +* commonized some protos ([`b1e7dff`](https://github.com/kaskada-ai/kaskada/commit/b1e7dfff098c38d68c97df65c92b0149565ac7a2)) + +* "easy" code review fixes ([`179556a`](https://github.com/kaskada-ai/kaskada/commit/179556a158f5e4aedbb7f7ebda27616ae0d141a4)) + +* testing again ([`625fc76`](https://github.com/kaskada-ai/kaskada/commit/625fc76ed3f103fd64e0a0ee40a382bf2bbe7b16)) + +* testing ([`4e086b9`](https://github.com/kaskada-ai/kaskada/commit/4e086b9f6686389b563dfaea26d0252206c3711b)) + +* Merge branch 'feature/object-store' of github.com:kaskada-ai/kaskada into feature/object-store ([`87b1ce3`](https://github.com/kaskada-ai/kaskada/commit/87b1ce39df038b68bb977fa3d70335a31f8fc1b0)) + +* clippy ([`8c72dcd`](https://github.com/kaskada-ai/kaskada/commit/8c72dcdf004f740c35489f93ac6c3b7f4ddcfb1d)) + +* Update README Badges for Combined Engine Workflow ([`3ea60fa`](https://github.com/kaskada-ai/kaskada/commit/3ea60fab42c8c5863b6366f4eb8cb79ebe624591)) + +* add docstring and test for avro_to_arrow ([`11f883e`](https://github.com/kaskada-ai/kaskada/commit/11f883e0fd502eb66f9997dd240940905a9ff1a0)) + +* mv pulsar-source demo to examples/pulsar-prepare; mv examples/pulsar to examples/pulsar-materialize ([`9072031`](https://github.com/kaskada-ai/kaskada/commit/9072031b2e963e04601137e336213e39780a387e)) + +* once more unto the breach ([`99476bb`](https://github.com/kaskada-ai/kaskada/commit/99476bb8c08017c145db26c147793a9349d41f8f)) + +* once more unto the breach ([`af4e585`](https://github.com/kaskada-ai/kaskada/commit/af4e585db07c73a7458dbd1d9a4fb169bdf1aeef)) + +* once more unto the breach ([`f0b789a`](https://github.com/kaskada-ai/kaskada/commit/f0b789a5d7ad53e63ad6f972e9ce833c2e23d390)) + +* once more unto the breach ([`3a2b221`](https://github.com/kaskada-ai/kaskada/commit/3a2b221f96955adb839e269bc870e502bb15f86f)) + +* once more unto the breach ([`8fd8efb`](https://github.com/kaskada-ai/kaskada/commit/8fd8efb79f74569d9780526cb47022ea01c99995)) + +* once more unto the breach ([`3a80e2e`](https://github.com/kaskada-ai/kaskada/commit/3a80e2ed1f2638d1cff8826615f764c4ed891936)) + +* once more unto the breach ([`b364a44`](https://github.com/kaskada-ai/kaskada/commit/b364a4405bb3763166522319ab6cb43987b37f7f)) + +* using go from inside the image ([`9d15b2b`](https://github.com/kaskada-ai/kaskada/commit/9d15b2bafe45072868de40aceaafa143b83c7164)) + +* using go from inside the image ([`fc5b571`](https://github.com/kaskada-ai/kaskada/commit/fc5b571537524040c58243468d6b75f24a3ecb98)) + +* testing ([`0eee2a1`](https://github.com/kaskada-ai/kaskada/commit/0eee2a1fcc8a2a36bb8e690e57931bd0a2ff3890)) + +* testing ([`9e87fe6`](https://github.com/kaskada-ai/kaskada/commit/9e87fe6f2e59fee5b4c5b4bdfb2cfe4821c30655)) + +* testing ([`e5c3adf`](https://github.com/kaskada-ai/kaskada/commit/e5c3adfad6c9a05b615e468aab114a6e6f9300bf)) + +* testing ([`f425b24`](https://github.com/kaskada-ai/kaskada/commit/f425b24f47167529342809cbe2a2a04252e8abef)) + +* testing ([`e6a1a54`](https://github.com/kaskada-ai/kaskada/commit/e6a1a544c3e109460bedcc0da8fe6ba2ec4b0ae6)) + +* docker file cleanup ([`5020268`](https://github.com/kaskada-ai/kaskada/commit/5020268ba90417be174d3783cb905bee44ad8b5f)) + +* testing ([`e32d0b7`](https://github.com/kaskada-ai/kaskada/commit/e32d0b7c63eea185b5315938c0c0f4dde957fe3c)) + +* testing 
([`85f4719`](https://github.com/kaskada-ai/kaskada/commit/85f47191dbbe4596d4a722b60e2114fad5ab36ae)) + +* testing ([`77ff0ef`](https://github.com/kaskada-ai/kaskada/commit/77ff0ef18a282cea38c3fafc7014108dec8b366b)) + +* testing ([`a55286f`](https://github.com/kaskada-ai/kaskada/commit/a55286f9afd3bdc1e77bf38f4a5e8ee1fb7536db)) + +* testing ([`719799f`](https://github.com/kaskada-ai/kaskada/commit/719799fd7dc236b0361676cad361eccb7110a1a9)) + +* testing ([`6409398`](https://github.com/kaskada-ai/kaskada/commit/6409398deb401672bd0952dde1e11ff3879bf8f5)) + +* testing ([`8c3c9a2`](https://github.com/kaskada-ai/kaskada/commit/8c3c9a2a5535442e0eea0f53026a68bed8f44d8e)) + +* testing ([`1c69da6`](https://github.com/kaskada-ai/kaskada/commit/1c69da66b454c971a2a902a3b07392839e38a9dd)) + +* testing ([`cb790e3`](https://github.com/kaskada-ai/kaskada/commit/cb790e357016f051d171539997f0cd6a9f34b7a8)) + +* checking docker mount points ([`748ddc4`](https://github.com/kaskada-ai/kaskada/commit/748ddc431b9a0ff883bd341ed0d27cd0dcc646d3)) + +* checking docker mount points ([`c8d2249`](https://github.com/kaskada-ai/kaskada/commit/c8d2249e35e5a13702b99d7310ecf0544be9a7c8)) + +* Merge branch 'main' into feature/object-store ([`33739d2`](https://github.com/kaskada-ai/kaskada/commit/33739d269add244dfefa54265299de2bbb36920b)) + +* removed todo ([`19183f1`](https://github.com/kaskada-ai/kaskada/commit/19183f192021d609a021270bf12269ee7fb8e834)) + +* added tests/initial pass ([`d57c4e2`](https://github.com/kaskada-ai/kaskada/commit/d57c4e24e916090f13ac416fa698eb6da32d98e6)) + +* testing sparrow cross compile from amd64 to arm64 ([`a803ee4`](https://github.com/kaskada-ai/kaskada/commit/a803ee4f39e3a139677b7179b4a27f84c63fec14)) + +* temp Dockerfile to test arm64 ([`f0991a8`](https://github.com/kaskada-ai/kaskada/commit/f0991a8285e2f4c1afa03c790405ee4e07f3de32)) + +* added integration to file service ([`cd982eb`](https://github.com/kaskada-ai/kaskada/commit/cd982ebf05517053f9260204009de20740bc25fe)) + +* added rust sample code ([`7b827cd`](https://github.com/kaskada-ai/kaskada/commit/7b827cdbcf077dee8e7049405207617691ff8471)) + +* remove unused imports, and cargo fmt ([`94153ad`](https://github.com/kaskada-ai/kaskada/commit/94153ad9c875bafd7139e4638c8616b02370413b)) + +* Merge pull request #143 from kaskada-ai/tsk-ci-reusable-outputs + +(ci): propagating should_skip logic to reusable workflows and integ tests ([`5439ef8`](https://github.com/kaskada-ai/kaskada/commit/5439ef8c856d458bfab2dcd6b624998ed112fe44)) + +* Merge pull request #125 from kaskada-ai/rm/docs-faq + +Adding some FAQ's based on customer questions ([`32b4aad`](https://github.com/kaskada-ai/kaskada/commit/32b4aad36003d3cfbf259df82d30b2e8f1e33d70)) + +* update the decimal_fixed_len test files to be well-behaved sparrow inputs. 
(no timezone on the timestamp, and no decimal columns) ([`afd7f55`](https://github.com/kaskada-ai/kaskada/commit/afd7f55ee6980c7b12d508c5a1e796b0a0679bd1)) + +* add missing cert chain field ([`66004b3`](https://github.com/kaskada-ai/kaskada/commit/66004b3012728a045c580eb7999fc27cde3660ca)) + +* merge from main ([`64f15ca`](https://github.com/kaskada-ai/kaskada/commit/64f15caa0346735c930309bba7ee03cf2d10e43c)) + +* Merge branch 'main' into rm/docs-faq ([`4315781`](https://github.com/kaskada-ai/kaskada/commit/4315781854c587dd5d7967d75cec91ad0ee865fa)) + +* cross compile sparrow works ([`426808e`](https://github.com/kaskada-ai/kaskada/commit/426808e3b54ea1b5174b232dda5c0e4540cb6e15)) + +* Merge branch 'main' into tsk-ci-reusable-outputs ([`07aba69`](https://github.com/kaskada-ai/kaskada/commit/07aba69c3267148685fb68e6c011c0d7e3e8d459)) + +* fix path when downloading integ test binary ([`c3263d9`](https://github.com/kaskada-ai/kaskada/commit/c3263d9c54f10ffb0fbd2f987d1225f2316aaa97)) + +* (ci) propagating should_skip logic to reusable workflows and integ tests ([`b339318`](https://github.com/kaskada-ai/kaskada/commit/b3393184d2d27175882d4cf7d192d65346083ddf)) + +* Merge pull request #140 from kaskada-ai/tsk-ci-fix-skipping + +(ci): skip in reusable workflows for workflow_call events ([`0c76ad1`](https://github.com/kaskada-ai/kaskada/commit/0c76ad173ef16a76e0459b96a2648d1b5bc1d0d0)) + +* Merge pull request #134 from kaskada-ai/pulsar-add-tls-configs + +Add cert chain field to pulsar ([`29fb802`](https://github.com/kaskada-ai/kaskada/commit/29fb8027210d0650cbe8a9bf6665981c3899b559)) + +* (ci): skip test moved to each wren test, not whole job ([`9a43d74`](https://github.com/kaskada-ai/kaskada/commit/9a43d74e377962a0f02efca7239f660ae411e030)) + +* Merge pull request #142 from kaskada-ai/fix/update-temp-file-dir + +Update tempdir path for object store outputs ([`7c03cd8`](https://github.com/kaskada-ai/kaskada/commit/7c03cd8d9894562919eb534a7820d805a1990eab)) + +* Update tempdir path for object store outputs ([`4caa833`](https://github.com/kaskada-ai/kaskada/commit/4caa833ab9746231a5a418036823624c06d062f1)) + +* Uncommenting some install steps. 
([`d86cd55`](https://github.com/kaskada-ai/kaskada/commit/d86cd55032dadf1532ec0fb0bb7fe62c968e040a)) + +* added rust object store ([`3b6a486`](https://github.com/kaskada-ai/kaskada/commit/3b6a486055cacbb3376d225f1168c35b5edeabd4)) + +* Merge pull request #141 from kaskada-ai/bjchambers-patch-1 + +Update faq.adoc ([`94e01b3`](https://github.com/kaskada-ai/kaskada/commit/94e01b3ae29cfdcf1427f844d21314c3a8f350f2)) + +* Update faq.adoc ([`70f6c39`](https://github.com/kaskada-ai/kaskada/commit/70f6c3967b9bc2261adffb28073976b04d92e96f)) + +* skip in reusable workflows for workflow_call events ([`2ff47d8`](https://github.com/kaskada-ai/kaskada/commit/2ff47d8962e62f5e8a2bb8ba6ae0a93c7cba879e)) + +* Merge pull request #132 from kaskada-ai/rm/docs-polish + +Some editing work on the docs ([`88d37b8`](https://github.com/kaskada-ai/kaskada/commit/88d37b8dd116e93012351113e67310329a9f5a17)) + +* Merge pull request #139 from kaskada-ai/examples/fix-pulsar-notebook-topic + +Fix topic name in pulsar notebook ([`ce9a383`](https://github.com/kaskada-ai/kaskada/commit/ce9a383679a9092dbb68d29ec46854824d161856)) + +* Adding some FAQ's based on customer questions ([`79589df`](https://github.com/kaskada-ai/kaskada/commit/79589df80ee34074b883b27167c7dded15774a06)) + +* Merge branch 'main' into rm/docs-polish ([`68d9bce`](https://github.com/kaskada-ai/kaskada/commit/68d9bcea28b2199029902924b3176972da5b85b9)) + +* Apply suggestions from code review + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`9fb10fe`](https://github.com/kaskada-ai/kaskada/commit/9fb10fe4a21a697130565844773f3ecab222a952)) + +* Fix topic name in pulsar notebook ([`3836e81`](https://github.com/kaskada-ai/kaskada/commit/3836e81f3efc9fbef77900a1a23afb216f8e71ac)) + +* Merge pull request #137 from kaskada-ai/release/0.4.3 + +Release 0.4.3 ([`c59bed8`](https://github.com/kaskada-ai/kaskada/commit/c59bed82d65b0fefcabb7edce2db06a03b46654f)) + +* Release 0.4.3 ([`c243373`](https://github.com/kaskada-ai/kaskada/commit/c243373752ee85aae50345d30574ca3590e9eb6a)) + +* Merge pull request #135 from kaskada-ai/bug/prepare-s3 + +Updates Preparation Service to use Tempdir on S3 ([`d8ffb1f`](https://github.com/kaskada-ai/kaskada/commit/d8ffb1f508bd20cfb7551522145be77f0e82c308)) + +* use tempdir crate ([`f6d4fca`](https://github.com/kaskada-ai/kaskada/commit/f6d4fcaa4104cca9c4ade85a5780a2b10618a9a6)) + +* fixed s3 prepare and integration tests ([`a5cfa51`](https://github.com/kaskada-ai/kaskada/commit/a5cfa519d2f495b55e0ba4578a3b30636228eac9)) + +* Merge pull request #136 from kaskada-ai/tsk-seed-testing-workflow + +(ci): seeding testing workflow ([`1b6661f`](https://github.com/kaskada-ai/kaskada/commit/1b6661f5f731e277c19e81a325a789b623735afc)) + +* (ci): seeding testing workflow ([`040fce6`](https://github.com/kaskada-ai/kaskada/commit/040fce6b091db1ef41af6ca913db57cdafa8e453)) + +* added quick fix ([`b22a344`](https://github.com/kaskada-ai/kaskada/commit/b22a344cb28bb417f5349e6bafa43b45884a2e69)) + +* update number ([`b4277be`](https://github.com/kaskada-ai/kaskada/commit/b4277be709d0e168c8e78754516f333b2cfbbb50)) + +* Merge pull request #131 from kaskada-ai/tsk-integration-tests-ci + +(ci): Adds integration tests as part of CI ([`38b46e7`](https://github.com/kaskada-ai/kaskada/commit/38b46e703be22b9c67db792c932f2d2bf59e25bc)) + +* Merge pull request #133 from kaskada-ai/python/remote_files + +restored the ability to load files in object stores 
([`89497a1`](https://github.com/kaskada-ai/kaskada/commit/89497a1d4b0bbd053515ffe2805a809ac7fa7e7d)) + +* made suggested change ([`14d3a3f`](https://github.com/kaskada-ai/kaskada/commit/14d3a3f305ee4474fb323e98290a5505cf3a58f5)) + +* Add cert chain field to pulsar ([`affc3f1`](https://github.com/kaskada-ai/kaskada/commit/affc3f1798f3417b0208b6719a731aef291565ed)) + +* Merge pull request #129 from kaskada-ai/pulsar-add-auth-to-output + +Add jwt token auth to pulsar output ([`1dd4072`](https://github.com/kaskada-ai/kaskada/commit/1dd4072f37d390171aae0e79c3834294bc11270a)) + +* remove comment ([`101d2d7`](https://github.com/kaskada-ai/kaskada/commit/101d2d71fd425a7c499e982fc893d1fd42ac8032)) + +* fix most tests ([`990d17e`](https://github.com/kaskada-ai/kaskada/commit/990d17e6c22df181bff83def669e56bf7374a578)) + +* restored the ability to load files in object stores ([`8bb758c`](https://github.com/kaskada-ai/kaskada/commit/8bb758c0db58a305844553842d10a0472e54e34b)) + +* Some editing work on the docs + +* Add install section for the CLI (copied from quickstart) +* Added notes in "data loading" about supported input locations +* Removed redundant example queries +* Documented supported materialization destinations, removed Redis + example (since it's currently not working) +* Some minor wording & example visualization improvements +* Reorganized Python & CLI so that Python is consistently first. ([`73d7364`](https://github.com/kaskada-ai/kaskada/commit/73d7364a7e9c6025dd97643889fdc313a174a161)) + +* build integration api binary on ubuntu 20.04 ([`28d6b8f`](https://github.com/kaskada-ai/kaskada/commit/28d6b8f2b75ce6d857e4d13e772e84b3295f889d)) + +* run integ tests on Ubuntu 20.04 ([`3c5ec81`](https://github.com/kaskada-ai/kaskada/commit/3c5ec8176f705927905d80b3a9570bb7d15f81a5)) + +* use ubuntu:20.04 as base image ([`c872623`](https://github.com/kaskada-ai/kaskada/commit/c8726238bb3a23f05232867e42ff713f26bacfbf)) + +* fmt ([`c4c9a8a`](https://github.com/kaskada-ai/kaskada/commit/c4c9a8a4a985f842d1bf22955326ec96f92e0161)) + +* clippy ([`0bbc3a0`](https://github.com/kaskada-ai/kaskada/commit/0bbc3a0afa71cb1adbfa5603aeb03d0617d2e005)) + +* ignore test--calls a function that still has todo() ([`db49fea`](https://github.com/kaskada-ai/kaskada/commit/db49fea94f8ce5a16fb9cb6cf3a6e468143a801e)) + +* Cargo.lock update ([`37aef0d`](https://github.com/kaskada-ai/kaskada/commit/37aef0d7fa1face5822e8f5c357bb9be272a325a)) + +* Merge remote-tracking branch 'origin/main' into tsk-integration-tests-ci ([`250424c`](https://github.com/kaskada-ai/kaskada/commit/250424c2707f4c088da172ec5a12a62a3310dfe0)) + +* removing concurrency checks in reusable workflows ([`75b01dc`](https://github.com/kaskada-ai/kaskada/commit/75b01dcc42260e9bfc0aff8ae7667455847116aa)) + +* Merge pull request #63 from kaskada-ai/query-flight-recorder-tool + +feat: Re-enable the QFR tool ([`547e229`](https://github.com/kaskada-ai/kaskada/commit/547e229c919f7fe6bcc4857626ebb2e10aff7b21)) + +* update name to integration tests workflow ([`6b0d80d`](https://github.com/kaskada-ai/kaskada/commit/6b0d80db27972bf5cb4445550a5e10e87a5dea1f)) + +* final changes before CR ([`e3014f7`](https://github.com/kaskada-ai/kaskada/commit/e3014f7cbc9564e0e9d75dd5e3e3708bd2f58045)) + +* Merge remote-tracking branch 'origin/main' into tsk-integration-tests-ci ([`039aac7`](https://github.com/kaskada-ai/kaskada/commit/039aac767efc345b7e3c6ca60fb9f1385a359c74)) + +* Merge branch 'pulsar' of github.com:kaskada-ai/kaskada into pulsar 
([`8b04621`](https://github.com/kaskada-ai/kaskada/commit/8b04621cbdd61d8e705c7b3f99d5231a282cc4e1)) + +* plumb pulsar connection proto through prepare, and test that it works with Astra ([`1ec18f8`](https://github.com/kaskada-ai/kaskada/commit/1ec18f867cdae6b3e6d9f85d5de3a264d01f3130)) + +* Add jwt token auth to pulsar output ([`655ac7b`](https://github.com/kaskada-ai/kaskada/commit/655ac7befeea33261863079f87a6984cd9bf3055)) + +* Merge pull request #128 from kaskada-ai/engine-release-0.4.2 + +Update version to release 0.4.2 ([`ad30a88`](https://github.com/kaskada-ai/kaskada/commit/ad30a8822096a45ecb0f49cc49751ee8434f7b82)) + +* Update version to release 0.4.2 ([`dd47a97`](https://github.com/kaskada-ai/kaskada/commit/dd47a97033dee67d2d48c5fbff97ae87d32034a3)) + +* Merge pull request #120 from kaskada-ai/feature/docker-pulsar + +examples: add pulsar docker image and notebook ([`be6793e`](https://github.com/kaskada-ai/kaskada/commit/be6793e055429301240aa3654884fa94125832b8)) + +* Merge pull request #127 from kaskada-ai/pulsar-whitelist-oauth2 + +explicitly allow MPL2.0 ([`1ab4172`](https://github.com/kaskada-ai/kaskada/commit/1ab4172f622cffec8e8b5bf6dce215728bf7e89b)) + +* explicitly allow MPL2.0 ([`ab4247c`](https://github.com/kaskada-ai/kaskada/commit/ab4247c889c5f97a4181eaed5654a2d769427b30)) + +* add new proto files to build.rs ([`b1771e2`](https://github.com/kaskada-ai/kaskada/commit/b1771e2566a917226d698b04013a36588cf92a22)) + +* add to destination as well as source ([`c7b00b3`](https://github.com/kaskada-ai/kaskada/commit/c7b00b300e0b0aa37b0f5b0308e706bf0ab05399)) + +* add admin_service_url, auth_plugin; replace auth_token with auth_params ([`bac5b36`](https://github.com/kaskada-ai/kaskada/commit/bac5b36ee866aa50752dc51e3f790ce6cce0815b)) + +* proposed proto changes to support pulsar sources ([`28f3d3a`](https://github.com/kaskada-ai/kaskada/commit/28f3d3ae76b613b6c01a91f00637786ae0c03e40)) + +* Merge branch 'main' into pulsar ([`c4bb219`](https://github.com/kaskada-ai/kaskada/commit/c4bb2193ff9dd1f1ad7d1898464df20db044288a)) + +* Merge pull request #122 from kaskada-ai/rm/docs-rewrite-getting-started + +Cleanup Getting Started ([`26825a5`](https://github.com/kaskada-ai/kaskada/commit/26825a52ff25350eaf72fbd3b641eaf79bed3f4d)) + +* Comments ([`02ad4c1`](https://github.com/kaskada-ai/kaskada/commit/02ad4c1762ff6f9638dd108e1a5fd5ae52a5f078)) + +* Apply suggestions from code review + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`7d0d415`](https://github.com/kaskada-ai/kaskada/commit/7d0d4153a6ef0ac2bef0e2468cd8bfede0c04d89)) + +* clean notebook ([`ac10460`](https://github.com/kaskada-ai/kaskada/commit/ac1046050d501a422c7f8094062909e9e8f28f45)) + +* Add readme ([`69c503e`](https://github.com/kaskada-ai/kaskada/commit/69c503e0da425d2b8a520bcf8c39f347eff29fd4)) + +* update docker compose ([`89fb0b8`](https://github.com/kaskada-ai/kaskada/commit/89fb0b871fb0963b25126d6bafbb0f3b1ead31d6)) + +* Add example datasets ([`230ca51`](https://github.com/kaskada-ai/kaskada/commit/230ca51fe41c3711c488802ea28ba362c09b2ea5)) + +* Reoganize to sub directory ([`d8f8953`](https://github.com/kaskada-ai/kaskada/commit/d8f89535d9bb9813587e005b7e25ebb2a76b66f9)) + +* update notebook a bit ([`cb45769`](https://github.com/kaskada-ai/kaskada/commit/cb45769ca55441045eda3aa5af0152271ce7e8a0)) + +* update compose name ([`0ea1fe8`](https://github.com/kaskada-ai/kaskada/commit/0ea1fe8aff6a70b0880b46a9bb4df95bfdb21c9b)) + +* fix topic name 
([`b4171db`](https://github.com/kaskada-ai/kaskada/commit/b4171dba2526e89a8e6f1290992ad51d8ab1181a)) + +* clean notebook ([`de338ce`](https://github.com/kaskada-ai/kaskada/commit/de338ced3d50aa6eb1e016dea843c6c7163d69fb)) + +* Update docker compose to use public image ([`bb75971`](https://github.com/kaskada-ai/kaskada/commit/bb759715cdf3f604145fdc1457d6f4cf0235651a)) + +* Add docker compose for pulsar demo notebook ([`593d3da`](https://github.com/kaskada-ai/kaskada/commit/593d3da24b33c9a9857e9faf969a0880f28a1f7e)) + +* initial version ([`22fccda`](https://github.com/kaskada-ai/kaskada/commit/22fccda38648a7078abdb94f74ccdd0718a85c9f)) + +* add support for avro long, float, timestampmicros, string ([`c6e62b5`](https://github.com/kaskada-ai/kaskada/commit/c6e62b57c592de6beb3398566f5c77d0fb06fd57)) + +* remove unnecessary box-ing ([`ec130a3`](https://github.com/kaskada-ai/kaskada/commit/ec130a3b780af8833a8b91ad346d7c15a829976f)) + +* add avro_arrow for conversions boilerplate ([`f02ff1f`](https://github.com/kaskada-ai/kaskada/commit/f02ff1fe460f4c51f13ffacd9ad1620056515823)) + +* Cleanup Getting Started + +* New landing page with lots of code examples +* Combine the 3 sections into a single "Hello world Jupyter" page +* Hello world for CLI ([`192abd8`](https://github.com/kaskada-ai/kaskada/commit/192abd81ccf9d2995a33f7341510706d78bfa77e)) + +* Merge pull request #112 from kaskada-ai/rm/docs-cli-tables + +Document tables and loading with the CLI ([`80a3a0a`](https://github.com/kaskada-ai/kaskada/commit/80a3a0aed571442bccc91baa9d219ac8d63bdbb6)) + +* try again ([`69857ad`](https://github.com/kaskada-ai/kaskada/commit/69857ad545fb72003a19ddf733fa74488172f3b4)) + +* try again ([`275702c`](https://github.com/kaskada-ai/kaskada/commit/275702cc6fcc56628eb3cbfbd3047d376db02069)) + +* try again ([`5ab2358`](https://github.com/kaskada-ai/kaskada/commit/5ab2358e4ef9a2df51b3ab748165171f7c44ae8f)) + +* try again ([`70f0191`](https://github.com/kaskada-ai/kaskada/commit/70f019174d5e79b67fa946303b54a5491466ef3d)) + +* try again ([`bd59861`](https://github.com/kaskada-ai/kaskada/commit/bd598619ee499e8d957ed5e524dd7b882ffe7e67)) + +* try again ([`99b9cfa`](https://github.com/kaskada-ai/kaskada/commit/99b9cfab1b33132ea298fbfbf9377f15e43d29ad)) + +* try again ([`abc6670`](https://github.com/kaskada-ai/kaskada/commit/abc667092ee61549a09298fcc82ecf632e0ad2d3)) + +* try again ([`1f27c18`](https://github.com/kaskada-ai/kaskada/commit/1f27c1828bcc3601403f0caed1de28cb641b7629)) + +* this works locally ... testing on CI ([`97e31f0`](https://github.com/kaskada-ai/kaskada/commit/97e31f0d3c2b92ce2532a0ddef3d852fb6ff5dc8)) + +* Merge pull request #119 from kaskada-ai/therapon-patch-1 + +Update Dockerfile.release to use debian bullseye ([`4b8e5f9`](https://github.com/kaskada-ai/kaskada/commit/4b8e5f9203740d032d3168310746630efa0e82d7)) + +* Update Dockerfile.release to use debian bullseye + +We build against libc version 2.31 and we need the image we are going to run our binaries on to be at least 2.31. 
For debian that is bullseye (which is the version used to build Ubuntu 20.04) ([`53b9bf9`](https://github.com/kaskada-ai/kaskada/commit/53b9bf93088b5b8ad91cc978fd7dcfadd093f4b3)) + +* Add output from commands ([`d6082c4`](https://github.com/kaskada-ai/kaskada/commit/d6082c42edc4e9600097e627931566509577b6dd)) + +* readme ([`1966bb1`](https://github.com/kaskada-ai/kaskada/commit/1966bb1c946eae71a680ff1f76e3eeaf6182443d)) + +* cargo fix ([`0595694`](https://github.com/kaskada-ai/kaskada/commit/0595694c057d6dfb462c3161059cf45e45380a84)) + +* cargo fmt ([`1adf292`](https://github.com/kaskada-ai/kaskada/commit/1adf292043e5e3ebc4610e5f70e19f3c5aff454f)) + +* batch multiple rows into arrow columns until we read 100k messages or come to the end of the topic, whichever comes first ([`bffa7c7`](https://github.com/kaskada-ai/kaskada/commit/bffa7c7ca5cdb52ee5c1201f2406bb00548738c2)) + +* Merge pull request #114 from kaskada-ai/feature/pulsar-api + +Add Pulsar Table Source ([`5384755`](https://github.com/kaskada-ai/kaskada/commit/53847559440fad64ff604f5cbe1f3a2da1274f05)) + +* Merge pull request #118 from kaskada-ai/esp/wren_port + +move manager to port 3365 for Rest ([`2e6d68b`](https://github.com/kaskada-ai/kaskada/commit/2e6d68b6eacdc6de64e22b5e8289dc768b826037)) + +* move manager to port 3365 for Rest ([`fe03765`](https://github.com/kaskada-ai/kaskada/commit/fe03765252fad0affc2a0b08ca926ca4355447f7)) + +* Merge pull request #117 from kaskada-ai/release/python-0.1.1a8 + +Python Client 0.1.1a8 ([`942a0cf`](https://github.com/kaskada-ai/kaskada/commit/942a0cf5103e30d0f102991ecf56a2eb3cadaeeb)) + +* updated release for python ([`bbee77f`](https://github.com/kaskada-ai/kaskada/commit/bbee77fac2aaa9ddc02137086d3acd44fcc5e4de)) + +* prepares a single row from pulsar at a time ([`cd1f1a3`](https://github.com/kaskada-ai/kaskada/commit/cd1f1a32c8f7d6fb06040311fbcf93088af22127)) + +* Adding first draft of a demo notebook with a narrative for event processing. 
([`7c28633`](https://github.com/kaskada-ai/kaskada/commit/7c2863303b7eff814748b4c05994f5028fd4b6f6)) + +* Merge pull request #116 from kaskada-ai/feature/arguments + +Launch Kaskada Binaries with Arguments ([`0314209`](https://github.com/kaskada-ai/kaskada/commit/0314209cd760b333ba7e44108f81bbc6026190d7)) + +* formatting ([`f021bce`](https://github.com/kaskada-ai/kaskada/commit/f021bcef60a4801e9e4f881ad22b7fa0bab4a64e)) + +* types ([`78caf30`](https://github.com/kaskada-ai/kaskada/commit/78caf3078997ab7a5905567dc1a11567b5af16ed)) + +* added notebook example ([`7b08eb2`](https://github.com/kaskada-ai/kaskada/commit/7b08eb2805ca35ef766a458a32b5563890020e27)) + +* added tests ([`7a7ef69`](https://github.com/kaskada-ai/kaskada/commit/7a7ef6989bb3b1190e16faf432645f97280fdf98)) + +* added changes ([`13f6d6a`](https://github.com/kaskada-ai/kaskada/commit/13f6d6a5ddbb04aa9e289426473088c4c7302dfb)) + +* using the stderr/stdout for popen ([`f8265e9`](https://github.com/kaskada-ai/kaskada/commit/f8265e93a90ae43d746c55ccaf84f6a21857312e)) + +* Document tables and loading with the CLI ([`cc34de0`](https://github.com/kaskada-ai/kaskada/commit/cc34de0868999932a744b74219144265b0bcfdbb)) + +* Merge branch 'main' into feature/pulsar-api ([`2a0608f`](https://github.com/kaskada-ai/kaskada/commit/2a0608f6c53ddea45a056fdd69683ccd2d137701)) + +* Merge pull request #113 from kaskada-ai/tsk-engine-release-0.4.1 + +Cutting engine release 0.4.1 ([`acbea54`](https://github.com/kaskada-ai/kaskada/commit/acbea5434f2f32e9de67841877f1b2da4054b713)) + +* clippy fixes ([`7ec7806`](https://github.com/kaskada-ai/kaskada/commit/7ec7806f45a54a9bfe76229bd81662facaa141d3)) + +* Merge branch 'feature/pulsar-api' of github.com:kaskada-ai/kaskada into feature/pulsar-api ([`e660092`](https://github.com/kaskada-ai/kaskada/commit/e660092b278a85a3ef80f8c02d3d25df58006b77)) + +* added notebook example ([`a51cf73`](https://github.com/kaskada-ai/kaskada/commit/a51cf73f996bc2fd65e0c49ef1cf27afa32b952d)) + +* Merge branch 'main' into feature/pulsar-api ([`bbf9063`](https://github.com/kaskada-ai/kaskada/commit/bbf90637dedfa2c62150129996a21ec0e62450c2)) + +* format ([`19aa072`](https://github.com/kaskada-ai/kaskada/commit/19aa072bc1603734be32ccd798e66f566dc6fa63)) + +* added formatter ([`bb9b2ef`](https://github.com/kaskada-ai/kaskada/commit/bb9b2efddd000a87ed9971cc57446291b8ed09e1)) + +* clippy fixes ([`5b5ba21`](https://github.com/kaskada-ai/kaskada/commit/5b5ba21e37e037754b763c4f545be2acbae04427)) + +* clippy fixes ([`b4f0ab6`](https://github.com/kaskada-ai/kaskada/commit/b4f0ab699ee83fa996fa8c33bc197460c8fe2f18)) + +* Cutting engine release 0.4.1 ([`0195e93`](https://github.com/kaskada-ai/kaskada/commit/0195e93874223e1e6a0fbfdd38d9eb63263a861f)) + +* Merge pull request #110 from kaskada-ai/rm/docs-getting-started + +Some (haphazard) changes to docs ([`edaab09`](https://github.com/kaskada-ai/kaskada/commit/edaab09d532d58289ef7f304f647f8eafec65a4e)) + +* added tests ([`6d4bb03`](https://github.com/kaskada-ai/kaskada/commit/6d4bb03c6c2d9c0481f99667469ec77d98a0a25b)) + +* added pulsar table ([`1fc863a`](https://github.com/kaskada-ai/kaskada/commit/1fc863aa503b7047b30c4650799ef898401be697)) + +* proto changes ([`bc036fb`](https://github.com/kaskada-ai/kaskada/commit/bc036fb4b4f53cdb41a5ac360a2bae9453f64326)) + +* Merge pull request #111 from kaskada-ai/tsk-ci-use-ubuntu20 + +ci: use ubuntu 20.04 for engine builds and release ([`855921b`](https://github.com/kaskada-ai/kaskada/commit/855921bbb6fcad9f0e344e38f2b833783998ffbb)) + +* Theo comments 
([`7685771`](https://github.com/kaskada-ai/kaskada/commit/7685771a043f4bc55b4b071a7120a7c163a13be4)) + +* Some (haphazard) changes to docs + +* Use getting started as the start page for the docs +* Simplify the getting-started section some, remove duplicated content +* document CLI usage for views +* Make section names more concise & direct +* Fold "example queries" into the "queries" section +* Remove some "you need to install a client" warnings ([`9231c64`](https://github.com/kaskada-ai/kaskada/commit/9231c6414916f30d24c3269da40e829a264d15db)) + +* Merge pull request #108 from kaskada-ai/rm/ml-example + +Add an example running through the ML workflow described in the docs ([`7c0b36f`](https://github.com/kaskada-ai/kaskada/commit/7c0b36f37e3baa652dd069783e81796abcd09cd9)) + +* fmt ([`e247506`](https://github.com/kaskada-ai/kaskada/commit/e247506070e7bda02713c68cee969e3af95a1b4a)) + +* fix clippy ([`12c22dc`](https://github.com/kaskada-ai/kaskada/commit/12c22dc42533647d00e0b6a599cdc396aefe0d52)) + +* Merge pull request #109 from kaskada-ai/rm/docs-refresh2 + +Move things around to align to information architecture ([`db1b719`](https://github.com/kaskada-ai/kaskada/commit/db1b7197e99e395fc69243f968100470d9d85339)) + +* fix yaml ([`1ce5f13`](https://github.com/kaskada-ai/kaskada/commit/1ce5f1328eda723c37822b8df9b205ebd9ce2f6e)) + +* fix references to env variables ([`55ed591`](https://github.com/kaskada-ai/kaskada/commit/55ed59144abad7facdb341e7ba88a1b08e329453)) + +* fix references to env variables ([`bee07e5`](https://github.com/kaskada-ai/kaskada/commit/bee07e5ca13c10aaaaa813d00a80affd96248469)) + +* adds docker image and integ test execution ([`2be8dcd`](https://github.com/kaskada-ai/kaskada/commit/2be8dcd913faaeb3dc47d3b5c1032ea487c621e5)) + +* Drop unneeded type conversion and document actual requirements ([`0ac4938`](https://github.com/kaskada-ai/kaskada/commit/0ac49383e34067fcf2101b2aacab5b9538d4a8b8)) + +* Update docs-src/modules/ROOT/pages/loading-data.adoc + +Co-authored-by: Therapon Skoteiniotis <therapon@users.noreply.github.com> ([`7e4e96b`](https://github.com/kaskada-ai/kaskada/commit/7e4e96b1a47ccc28e890e239a610621b0cee5b97)) + +* Merge pull request #107 from kaskada-ai/rm/cli-load + +Document the workflow using CLI ([`5233618`](https://github.com/kaskada-ai/kaskada/commit/5233618f59a46607d5f98605884c793eecb68c5c)) + +* Add mocks for interfaces ([`e49f2aa`](https://github.com/kaskada-ai/kaskada/commit/e49f2aaa2a8c87b4921b4fe010ce5cd7e16d8a8e)) + +* Move things around to align to information architecture ([`c8dc91b`](https://github.com/kaskada-ai/kaskada/commit/c8dc91bbe0bd8cbdfbbca15f5dca329786d266a0)) + +* Add an example running through the ML workflow described in the docs ([`7d51e77`](https://github.com/kaskada-ai/kaskada/commit/7d51e77d014d50e6f102d46817ea5069352c1c75)) + +* Document the workflow using CLI + +This involved adding a "load" function to the CLI client and making a +couple fairly small changes to other parts of the CLI. 
+ +* Ability to write query results to STDOUT rather than just dumping the + response as JSON (so you can easily see the results) +* Ability to parse a query from STDIN instead of requiring it as an + argument (to support multi-line) ([`dea5df2`](https://github.com/kaskada-ai/kaskada/commit/dea5df21df68f7afbf5f659702f9d51ad868697a)) + +* Merge pull request #91 from kaskada-ai/rm/docs-refresh + +Move the overview to the "how-to" section as an ML example ([`3703acd`](https://github.com/kaskada-ai/kaskada/commit/3703acdb7bf7ed9b3d951077f8ebda5801938dbc)) + +* Merge pull request #105 from kaskada-ai/engine/fix-shift-to-bounds + +bug: fix off by one error in shift to ([`77eba8a`](https://github.com/kaskada-ai/kaskada/commit/77eba8aeb90935d3fe06266848fb5acfa3dce3ff)) + +* only upload integ test binary ([`2c644a5`](https://github.com/kaskada-ai/kaskada/commit/2c644a5fd16857daa8d4899cf3f1fa07a7bfc3d1)) + +* Fix off by one error in shift to ([`224d34d`](https://github.com/kaskada-ai/kaskada/commit/224d34d512c3213a100153b103d2f1906e0e4b24)) + +* Merge pull request #104 from kaskada-ai/tsk-integration-tests-ci + +use ginkgo command to build integ test binary ([`a01d00c`](https://github.com/kaskada-ai/kaskada/commit/a01d00c2a85d7fb2e381e01c9857def146f6eead)) + +* use ginkgo command to build integ test binary ([`5793034`](https://github.com/kaskada-ai/kaskada/commit/57930349406ddf643d7ab7aeeb462ba66efb35c8)) + +* Merge pull request #103 from kaskada-ai/tsk-integration-tests-ci + +fix download locations for artifacts ([`d71825c`](https://github.com/kaskada-ai/kaskada/commit/d71825c5d72ad5cb402d3123e5bfed452dc1bc8e)) + +* fix download locations for artifacts ([`0b2d368`](https://github.com/kaskada-ai/kaskada/commit/0b2d36819a2441b12c944991b789f01c6bcbde45)) + +* Merge pull request #86 from kaskada-ai/output-at-time + +feat: produce output at user-defined time ([`3906b76`](https://github.com/kaskada-ai/kaskada/commit/3906b76757ab85d425e070e60e36f1e61284054d)) + +* Merge pull request #102 from kaskada-ai/tsk-integration-tests-ci + +remove all concurrency for now ([`0f5937e`](https://github.com/kaskada-ai/kaskada/commit/0f5937ebe7b1c8d5029da46c4f6433099bd79dba)) + +* remove all concurrency for now ([`e4a237d`](https://github.com/kaskada-ai/kaskada/commit/e4a237d98a36ea5963d2101d5307a1e4992670dd)) + +* Merge pull request #101 from kaskada-ai/tsk-integration-tests-ci + +fix download of ginkgo ([`5202ae9`](https://github.com/kaskada-ai/kaskada/commit/5202ae99310c88124fac6360227e0e696a897b00)) + +* fix download of ginkgo ([`186e417`](https://github.com/kaskada-ai/kaskada/commit/186e4175f5f27543cc1d8f5edbf99d655301aa07)) + +* Merge pull request #99 from kaskada-ai/tsk-integration-tests-ci + +add workflow file to non-skippable paths ([`c35538b`](https://github.com/kaskada-ai/kaskada/commit/c35538bd6280f67193f29fe8764ef46e2fb69ad6)) + +* add workflow file to non-skippable paths ([`30bf5b0`](https://github.com/kaskada-ai/kaskada/commit/30bf5b05e7c195a106db55a11895f8f473cd55e7)) + +* Merge pull request #97 from kaskada-ai/tsk-integration-tests-ci + +fix JSON error ([`5713f0e`](https://github.com/kaskada-ai/kaskada/commit/5713f0ec6c613418d8f46570b539eef02b1535e6)) + +* fix JSON error ([`aa77067`](https://github.com/kaskada-ai/kaskada/commit/aa77067a94b2d2231bf2b4846ddb02c1595c53d6)) + +* Merge pull request #96 from kaskada-ai/tsk-integration-tests-ci + +do not skip on workflow_call ([`60f75fc`](https://github.com/kaskada-ai/kaskada/commit/60f75fc54a66eb41e18f889215cd4ac01d227946)) + +* do not skip on workflow_call
([`4f7c115`](https://github.com/kaskada-ai/kaskada/commit/4f7c115ca1091209c9814ca3ddc4edad9091b5c4)) + +* Merge pull request #95 from kaskada-ai/tsk-integration-tests-ci + +remove concurrency ([`e84929b`](https://github.com/kaskada-ai/kaskada/commit/e84929b0cf18fcb4f5f6407a45e9d2ea55468513)) + +* remove concurrency ([`4a7e0a3`](https://github.com/kaskada-ai/kaskada/commit/4a7e0a33c98041cc724b15b207f6bb63429b1d09)) + +* Merge pull request #94 from kaskada-ai/tsk-integration-tests-ci + +add needs ([`57cb921`](https://github.com/kaskada-ai/kaskada/commit/57cb92145f27eb1db4a3c916ca3bd74dbb5424d4)) + +* add needs ([`700b198`](https://github.com/kaskada-ai/kaskada/commit/700b19800a77aa54d94da5f82b153b60f93acdef)) + +* Merge pull request #93 from kaskada-ai/tsk-integration-tests-ci + +Tsk integration tests ci ([`88c1b00`](https://github.com/kaskada-ai/kaskada/commit/88c1b00b28e0b576a01ee4d5c2dcb0f67fc4ee76)) + +* moves reusable workflow calls to jobs from steps ([`322109b`](https://github.com/kaskada-ai/kaskada/commit/322109b837f7b3119a4936aaece4320c193957ef)) + +* Merge remote-tracking branch 'origin/main' into tsk-integration-tests-ci ([`1fa4b2b`](https://github.com/kaskada-ai/kaskada/commit/1fa4b2ba46e4cab3543fdd99927f9e7cccf5eebd)) + +* Merge pull request #89 from kaskada-ai/tsk-integration-tests-ci + +ci: adds integration test CI workflow ([`4a4d018`](https://github.com/kaskada-ai/kaskada/commit/4a4d018d9013eb4a6a57926212dc9f49d635e029)) + +* Merge branch 'tsk-integration-tests-ci' of github.com:kaskada-ai/kaskada into tsk-integration-tests-ci ([`aa71172`](https://github.com/kaskada-ai/kaskada/commit/aa7117206f01a55047cef0574c4a92aad48ece6d)) + +* Merge branch 'main' into tsk-integration-tests-ci ([`19a0b00`](https://github.com/kaskada-ai/kaskada/commit/19a0b00ef6064dc527b41ab6bfa05642e43d6413)) + +* Merge pull request #92 from kaskada-ai/feature/release-0.3.0 + +Binary Release 0.3.0 and Python Client 0.1.1a7 ([`cb602c1`](https://github.com/kaskada-ai/kaskada/commit/cb602c10d24fa0a51750f2d5653225344bfbdf78)) + +* Merge branch 'main' into feature/release-0.3.0 ([`1ff0a48`](https://github.com/kaskada-ai/kaskada/commit/1ff0a48a366752e9d6cca72a3708017c725813cf)) + +* 0.3.0 ([`df8bf63`](https://github.com/kaskada-ai/kaskada/commit/df8bf6310744f67984238815a9ecf79338ac2504)) + +* bumped versions ([`cbf37f5`](https://github.com/kaskada-ai/kaskada/commit/cbf37f5b927378050279e4574905e232f294d796)) + +* Merge pull request #52 from kaskada-ai/kerinin-patch-1 + +Remove comment about support ([`27281f7`](https://github.com/kaskada-ai/kaskada/commit/27281f7a1aff43185971ad9faadd42d4a31f38e1)) + +* Move the overview to the "how-to" section as an ML example ([`493fa19`](https://github.com/kaskada-ai/kaskada/commit/493fa1939d4f9a3446d514ba279a098fca9a6152)) + +* Remove comment about support ([`98612fc`](https://github.com/kaskada-ai/kaskada/commit/98612fc4f8cfe715a30050986e99139ede65aa6c)) + +* set shell for run command ([`a8da12e`](https://github.com/kaskada-ai/kaskada/commit/a8da12ea2610d93b60d2c58739d69a3b2b02c55b)) + +* Merge pull request #85 from kaskada-ai/rm/fix-docs + +Minor fixes from a run-through ([`fc9018a`](https://github.com/kaskada-ai/kaskada/commit/fc9018ad968cd21b2d6c40f382b14c66dfa14135)) + +* Merge branch 'main' into tsk-integration-tests-ci ([`b83596f`](https://github.com/kaskada-ai/kaskada/commit/b83596ff169e2cb4d4c86d810487b403d4740f95)) + +* Merge pull request #79 from kaskada-ai/feature/examples-readme + +Update Examples with README and Makefile 
([`36ec6e3`](https://github.com/kaskada-ai/kaskada/commit/36ec6e390235a3d274a6ed424c06b18febd3b776)) + +* Merge branch 'main' into feature/examples-readme ([`aa76b39`](https://github.com/kaskada-ai/kaskada/commit/aa76b39cfd6d2fc0d216b818b6f855dd948cec77)) + +* Merge pull request #87 from kaskada-ai/tsk-fix-integ-tests + +tests: updates integ tests for field name change Query => Expression ([`f7a569c`](https://github.com/kaskada-ai/kaskada/commit/f7a569c727ba2a78eee009619d53764c0f3cfa81)) + +* Update code samples, add bare-bones documentation for supported feature store targets. ([`25e2913`](https://github.com/kaskada-ai/kaskada/commit/25e29131097e2c8828d76d85ec4693940c7ed7e2)) + +* Merge branch 'main' into feature/examples-readme ([`1801638`](https://github.com/kaskada-ai/kaskada/commit/18016383d6825d5aba1f4b48c87cde26b74d0792)) + +* Merge branch 'feature/examples-readme' of github.com:kaskada-ai/kaskada into feature/examples-readme ([`ec00895`](https://github.com/kaskada-ai/kaskada/commit/ec00895455e71249ff585efe23e03e89c8db903c)) + +* code review comments ([`83970be`](https://github.com/kaskada-ai/kaskada/commit/83970be21cf8cea528c0bb0129737cdba504530d)) + +* tests: updates integ tests for field name change Query => Expression ([`5883d03`](https://github.com/kaskada-ai/kaskada/commit/5883d030af248c7a1485b19330920526c22f718e)) + +* Merge pull request #76 from kaskada-ai/add-materialization-version + +feat: add incrementing materialization version ([`9ac802f`](https://github.com/kaskada-ai/kaskada/commit/9ac802fd9437b827e33a079360604dcb57f3e856)) + +* fix type issues ([`3e1ded6`](https://github.com/kaskada-ai/kaskada/commit/3e1ded6732aec9eac626264e4d151917d8178dbf)) + +* change field to i64 ([`eea18bd`](https://github.com/kaskada-ai/kaskada/commit/eea18bd7be54da1f5e0fef61861d20f47a9fec2f)) + +* update mat to use expression instead of query ([`18138b2`](https://github.com/kaskada-ai/kaskada/commit/18138b2663a1037ef18f5d0a400211fcd9c930e8)) + +* Add incrementing mat version ([`5dbc173`](https://github.com/kaskada-ai/kaskada/commit/5dbc1733d6a5177dda945de2b3615bda9ed238ed)) + +* add upper bound to table reader ([`a0785a1`](https://github.com/kaskada-ai/kaskada/commit/a0785a197417732a1d8ab3df0d6cd580a63d2cb6)) + +* assertions on behaviors ([`08bafb4`](https://github.com/kaskada-ai/kaskada/commit/08bafb4b050e2fad4322e93369ca0b4a2c18e3a4)) + +* Minor fixes from a run-through ([`58087df`](https://github.com/kaskada-ai/kaskada/commit/58087df363dda3f41c21fd81300eb7ba9ee6d9ff)) + +* add at time enum ([`39ca6bb`](https://github.com/kaskada-ai/kaskada/commit/39ca6bb112f94b627dca82717be2040fb5f5a4fa)) + +* Fix final at time with ticks ([`89a08ee`](https://github.com/kaskada-ai/kaskada/commit/89a08ee3b05ed2776eb7b323e7efcff08aefacc4)) + +* Merge branch 'main' into feature/examples-readme ([`691a138`](https://github.com/kaskada-ai/kaskada/commit/691a13823150a10bf974708606270fa6f38114b6)) + +* added readme ([`91268fd`](https://github.com/kaskada-ai/kaskada/commit/91268fddd31f476d59f4ec563e21b7e93c505fb7)) + +* updated readme ([`803acb7`](https://github.com/kaskada-ai/kaskada/commit/803acb717f1b9576c20aee21df28bca386e3d5a3)) + +* Merge pull request #83 from kaskada-ai/esp/fix_wren + +updated manager to set defaults on create query ([`2cac36c`](https://github.com/kaskada-ai/kaskada/commit/2cac36c21e60f2a30bac0eebfef6f6c996cfbbea)) + +* validate dest is set in createmat ([`82470a1`](https://github.com/kaskada-ai/kaskada/commit/82470a11ac8fef10c224cd6ad01ec9067400e14c)) + +* add null check in manager
([`247a1c6`](https://github.com/kaskada-ai/kaskada/commit/247a1c6f758168eab4375c5e7e910e8004684262)) + +* updated manager to set defaults on create query ([`cd164c6`](https://github.com/kaskada-ai/kaskada/commit/cd164c6c5d4214d6322c69664c5f96dc7d3f66cf)) + +* Merge pull request #81 from kaskada-ai/rm/fix-docs + +Some doc fixes ([`e24199b`](https://github.com/kaskada-ai/kaskada/commit/e24199b9e5253e7bcea257ca7735f6fab60109ed)) + +* Merge pull request #71 from kaskada-ai/benchmark-cli + +Resurrect the cli transactions benchmark ([`f50cbb8`](https://github.com/kaskada-ai/kaskada/commit/f50cbb8cef0bf174201e041f9a3bd263a330e214)) + +* Some doc fixes ([`d0b10cf`](https://github.com/kaskada-ai/kaskada/commit/d0b10cf9537f1bf4608310a4f23dd9d47dc8754b)) + +* Merge branch 'main' into benchmark-cli ([`b43c987`](https://github.com/kaskada-ai/kaskada/commit/b43c987c6dbb7765f49d8e904cb1f8602c74d80e)) + +* Merge pull request #75 from kaskada-ai/tempfile + +upgrade to tempfile 3.4 which eliminates security-problem dependency remove_dir_all ([`6de3c26`](https://github.com/kaskada-ai/kaskada/commit/6de3c26b6a0334491e39054fdcb623a02234a08f)) + +* added new examples readme ([`f90c57a`](https://github.com/kaskada-ai/kaskada/commit/f90c57a9429eef167cb50e08f380fec0785211e0)) + +* Merge pull request #74 from kaskada-ai/update-python-version + +update python client ([`efa9d67`](https://github.com/kaskada-ai/kaskada/commit/efa9d67228b66e433e81d561dfdf9958defca668)) + +* Merge pull request #77 from kaskada-ai/esp/fix_render + +fixed fenl magic rendering ([`f7064b2`](https://github.com/kaskada-ai/kaskada/commit/f7064b216e8c01f9bf9dde327f0a45040ed6c561)) + +* fixed fenl magic rendering ([`7a13b35`](https://github.com/kaskada-ai/kaskada/commit/7a13b3545417f34140ef474784ff8391df2081c8)) + +* upgrade to tempfile 3.4 which eliminates security-problem dependency remove_dir_all ([`641bada`](https://github.com/kaskada-ai/kaskada/commit/641bada220d8adda7720c7e8cd8ab77788fb1457)) + +* remove debug logging that is redundant wrt spans, and move some info to debug ([`688f20b`](https://github.com/kaskada-ai/kaskada/commit/688f20b356b33d55a59700382c0f9f02e0879512)) + +* update ObjectStoreDestination to use file_type instead of file_format ([`ba43941`](https://github.com/kaskada-ai/kaskada/commit/ba43941529329ecb8f9bd50df0734e3fccd3eb09)) + +* improve Debug implementations and use finish_non_exhaustive where applicable ([`0ed5b6a`](https://github.com/kaskada-ai/kaskada/commit/0ed5b6aa5dea56d09fe0809e3ca76b96b672b86d)) + +* markdown-ify README ([`3ca654c`](https://github.com/kaskada-ai/kaskada/commit/3ca654ce896bf5e1d6429c6a519dd835bcf13bc2)) + +* add readme ([`fb3bb47`](https://github.com/kaskada-ai/kaskada/commit/fb3bb47dd95b00df2f4b7e5f251701a37f905330)) + +* rename schema and query files ([`0f2a9e6`](https://github.com/kaskada-ai/kaskada/commit/0f2a9e6053bdf543c03aa24dce4452c152f70f43)) + +* update python client ([`64911ed`](https://github.com/kaskada-ai/kaskada/commit/64911ed94e540d0e69d4415cab599bc046fe2d88)) + +* add simple query-mean ([`78e8b37`](https://github.com/kaskada-ai/kaskada/commit/78e8b37bccf76e43eb03aea0781f55ee9f07ecbf)) + +* split schema and script files apart ([`47bb8e8`](https://github.com/kaskada-ai/kaskada/commit/47bb8e8baf033ea6197fed86646c9e8ebfe3c1e3)) + +* use msno as group_column_name so we don't need python preprocessing ([`5dd527b`](https://github.com/kaskada-ai/kaskada/commit/5dd527b3b350bed55dcb13da98b7899e90483b7e)) + +* adding debug timing to writes 
([`4558f63`](https://github.com/kaskada-ai/kaskada/commit/4558f6366f1e7e7b471fcdcba96e078c0d300fe9)) + +* add Debug trait to Operation and log timings at debug level ([`dbb9857`](https://github.com/kaskada-ai/kaskada/commit/dbb985779c4e8916e0b05666c8cf1afb0c4794f4)) + +* update query yaml for modern kaskada ([`e595370`](https://github.com/kaskada-ai/kaskada/commit/e5953701a54f483cdf1bc90a12fbda3b748578f2)) + +* check at the beginning for valid output destination instead of doing a bunch of work and then panicking ([`85f40f2`](https://github.com/kaskada-ai/kaskada/commit/85f40f2db8bbf57e65883d22c6a56e53599d8d31)) + +* output metadata as yaml for easy paste into protobuf structures ([`1f255d6`](https://github.com/kaskada-ai/kaskada/commit/1f255d617f6ae67fce1d6b9cb320f93dfc9fe3db)) + +* add unmodified query1 ([`6433a9f`](https://github.com/kaskada-ai/kaskada/commit/6433a9fe474da7a99b85bcb8af14fa8307635e95)) + +* Merge pull request #54 from kaskada-ai/fix-typo-in-error-status + +fix: typo in error status ([`9ad99ff`](https://github.com/kaskada-ai/kaskada/commit/9ad99ff5fe6cbed7017c4dcff5478f1fa31d4049)) + +* Merge pull request #49 from kaskada-ai/prepare-csv + +allow sparrow-main prepare to work against csv ([`be50606`](https://github.com/kaskada-ai/kaskada/commit/be5060685f33af1966be7eacc495e5e9aae9bd2c)) + +* Merge pull request #61 from kaskada-ai/tsk-docs-update + +docs: fixes titles and updates quick start ([`2457deb`](https://github.com/kaskada-ai/kaskada/commit/2457deb807175e943b4b5aeaccb6e419b5cb1296)) + +* Merge pull request #51 from kaskada-ai/python/rename + +various python cleanup ([`d393983`](https://github.com/kaskada-ai/kaskada/commit/d393983f9d4cacc83b53581c1d03fae043426dfc)) + +* Merge pull request #69 from kaskada-ai/release/add-v-to-tag-prefix + +fix: add v prefix to release drafter tag prefix ([`3329338`](https://github.com/kaskada-ai/kaskada/commit/3329338dcf4dd0d0daa6a4180c41c5bcb51ba601)) + +* Add v prefix to tag prefix ([`a39284f`](https://github.com/kaskada-ai/kaskada/commit/a39284fd6609c209b268e7e1f657295936b547e6)) + +* Merge pull request #59 from kaskada-ai/engine/shift-by-function + +feat: add shift by function ([`506dbcf`](https://github.com/kaskada-ai/kaskada/commit/506dbcf0d886846e08efc763ef181ff6fa621b7b)) + +* Merge pull request #66 from kaskada-ai/release/update-github-release-drafter + +fix: add missing v in release drafter ([`c6bbff4`](https://github.com/kaskada-ai/kaskada/commit/c6bbff49f81c21eac131b55a4d55bfb56223e8af)) + +* Add missing v in release drafter ([`34effd8`](https://github.com/kaskada-ai/kaskada/commit/34effd8ac8651a66dac784a8dd8a5ea9599ed165)) + +* simplify signature ([`7558632`](https://github.com/kaskada-ai/kaskada/commit/755863255ab2eeb88be9998a7abf2d2676a87f3c)) + +* Add shift by function ([`04fa529`](https://github.com/kaskada-ai/kaskada/commit/04fa5297902e91654bc292fd956f4ee3d352f395)) + +* Merge pull request #62 from kaskada-ai/release/release-0.2.0 + +release: update versions for 0.2.0 release ([`daacd6a`](https://github.com/kaskada-ai/kaskada/commit/daacd6aa5abd9543bda587046a096aa6533c23e2)) + +* comment ([`de7d09d`](https://github.com/kaskada-ai/kaskada/commit/de7d09d541e50c4a1a528e934738604b5ac34f34)) + +* update version for release ([`2e5dc4e`](https://github.com/kaskada-ai/kaskada/commit/2e5dc4ef310fa212a057d490a3ceee79811b7e44)) + +* fixes after rebase ([`191107c`](https://github.com/kaskada-ai/kaskada/commit/191107cb964b29ca1bbbe73feece72f90ea5c5b7)) + +* Create issue templates
([`4438fd3`](https://github.com/kaskada-ai/kaskada/commit/4438fd3d4a2d4e9f33e15a69e399cfe98fbc7f98)) + +* fixed formatting ([`1c5e926`](https://github.com/kaskada-ai/kaskada/commit/1c5e926f0e574844f0bd35f89b241ad5d28fa6fe)) + +* added back QueryResource ([`9328a8b`](https://github.com/kaskada-ai/kaskada/commit/9328a8b00e3f069fd1f7bad024d14d0b7faa1772)) + +* requested changes ([`4dbdd6c`](https://github.com/kaskada-ai/kaskada/commit/4dbdd6ce82c40666133292aa15e89e32cde30b51)) + +* fix formatting ([`bb7765e`](https://github.com/kaskada-ai/kaskada/commit/bb7765e92209a2b7cbbb37476246cde4cc68e48e)) + +* various python cleanup ([`1804872`](https://github.com/kaskada-ai/kaskada/commit/18048724a03d7c633e1b132c42a0b5764ea204d4)) + +* Merge pull request #58 from kaskada-ai/move-registry-to-header + +ref: Move static info to flight record header ([`026f473`](https://github.com/kaskada-ai/kaskada/commit/026f473507c9ffb8a143bdf13a18033058af4edc)) + +* fix calls ([`2ad99ed`](https://github.com/kaskada-ai/kaskada/commit/2ad99edeafe80d7690c024607a15b498d3cdfc7e)) + +* ref: Move static info to flight record header ([`055aed9`](https://github.com/kaskada-ai/kaskada/commit/055aed911990a3bc8cd2d2d41e2216206eaa6480)) + +* Merge pull request #53 from kaskada-ai/jbellis-patch-1-2 + +clarify uniqueness and subsort requirements ([`5f94220`](https://github.com/kaskada-ai/kaskada/commit/5f942205266750eacc127cd690a0aeef9d5aff91)) + +* Merge branch 'main' into jbellis-patch-1-2 ([`a0d70c8`](https://github.com/kaskada-ai/kaskada/commit/a0d70c81b6464b36776015af2d33cfae271e4258)) + +* "the" time column ([`953e181`](https://github.com/kaskada-ai/kaskada/commit/953e18152bc4ea7c7a62df3649b1fbb04fce99dc)) + +* Merge pull request #48 from kaskada-ai/query-flight-recorder-generalization + +feat: Re-introduce generalized Flight Recorder ([`5c3c9d5`](https://github.com/kaskada-ai/kaskada/commit/5c3c9d568140d9b2ea82282641b6a7c60207f8ca)) + +* Merge pull request #10 from kaskada-ai/engine/pulsar-materialization-framework + +feat: Add basic pulsar materialization framework ([`a3ed809`](https://github.com/kaskada-ai/kaskada/commit/a3ed8092ccf5ad9259bd2b35f7f2c1564281db08)) + +* Merge branch 'main' into query-flight-recorder-generalization ([`019d34b`](https://github.com/kaskada-ai/kaskada/commit/019d34be630e696f81a2067b56d5dc62abf4bcef)) + +* fix error type ([`ce534b1`](https://github.com/kaskada-ai/kaskada/commit/ce534b1a6a63c65e40878de1fa0ac35154d79d71)) + +* Add pulsar materialization framework ([`519945f`](https://github.com/kaskada-ai/kaskada/commit/519945f2eda28b9fc53c028c27692907846cf050)) + +* Add pulsar materialization framework ([`44dc5a5`](https://github.com/kaskada-ai/kaskada/commit/44dc5a56b33b13b11b472a77562c858b81839763)) + +* Merge pull request #57 from kaskada-ai/ci/use-single-thread-for-cargo-test + + fix: temporarily use single threaded for cargo test in CI #56 ([`1c6059e`](https://github.com/kaskada-ai/kaskada/commit/1c6059e05bf7688ac3c30428872c136cc608ef28)) + +* Add comment ([`2317259`](https://github.com/kaskada-ai/kaskada/commit/2317259d85259ce370587bb7891f96fdfdf4431c)) + +* serial tests ([`50345e5`](https://github.com/kaskada-ai/kaskada/commit/50345e5fce1571cd730b7e3e1fc48c9c4a2226ec)) + +* skip cache step ([`f753078`](https://github.com/kaskada-ai/kaskada/commit/f75307859d394a9e7f186a9715d0ed0dbc10b4ce)) + +* borrow, don't clone ([`e18fb25`](https://github.com/kaskada-ai/kaskada/commit/e18fb2538e47941d7f86cacdd729c14db3569daf)) + +* comments
([`690b601`](https://github.com/kaskada-ai/kaskada/commit/690b6012d75a34f5af6a00261ff22ebf9acdecb9)) + +* clarify uniqueness and subsort requirements ([`32072b9`](https://github.com/kaskada-ai/kaskada/commit/32072b911b64f57bb408c87186a55e1fb071285e)) + +* Merge pull request #50 from kaskada-ai/feature/add-load-options + +Add Load from Dataframe to Python Client ([`ef27615`](https://github.com/kaskada-ai/kaskada/commit/ef2761599eb35205457ca9490d3b14105007f443)) + +* change activity naming ([`fded208`](https://github.com/kaskada-ai/kaskada/commit/fded208d8b601237d700c31bf47abab77cbfca54)) + +* Merge branch 'main' into feature/add-load-options ([`b454658`](https://github.com/kaskada-ai/kaskada/commit/b454658bdc58cd5be2593ff9b5f6f1dcaf707a24)) + +* Merge pull request #29 from kaskada-ai/bug/prepare-metadata + +Prepare Metadata Flush Per Batch ([`162ea33`](https://github.com/kaskada-ai/kaskada/commit/162ea33140118cebbf2325591fb80a9878c88662)) + +* Merge branch 'main' into feature/add-load-options ([`780f218`](https://github.com/kaskada-ai/kaskada/commit/780f21842411f2ba382f710f419e46b1667b75d3)) + +* assertion change ([`c0a8fe8`](https://github.com/kaskada-ai/kaskada/commit/c0a8fe8f96502cb38a5d21562da5276479c7cff6)) + +* use FilePath::try_from_local ([`5f5f5c6`](https://github.com/kaskada-ai/kaskada/commit/5f5f5c63a6ae8a5a038c577988e56ff4d4fe43c9)) + +* allow sparrow-main prepare to work against csv ([`5556089`](https://github.com/kaskada-ai/kaskada/commit/555608904acf858a1bd4e9a450865f521c8ea7b4)) + +* code review comments ([`eb3682a`](https://github.com/kaskada-ai/kaskada/commit/eb3682a93e10fa4e7df833dc53c6c3cfef911bff)) + +* Merge branch 'bug/prepare-metadata' of github.com:kaskada-ai/kaskada into bug/prepare-metadata ([`4cbdbf2`](https://github.com/kaskada-ai/kaskada/commit/4cbdbf2e9ddf6fee6a488d78412f62c4e3a8d096)) + +* code review comments ([`889cb43`](https://github.com/kaskada-ai/kaskada/commit/889cb4389467994e6b75b3dcfc4bbb947f037463)) + +* comments ([`c7c04fc`](https://github.com/kaskada-ai/kaskada/commit/c7c04fce5c60d1505a2c72cefad3a4ba0b9aa682)) + +* adding some comments ([`7a8dd2c`](https://github.com/kaskada-ai/kaskada/commit/7a8dd2c7bb6962216c4978a0a596874dfd1b1c65)) + +* single registration block ([`8cf17ca`](https://github.com/kaskada-ai/kaskada/commit/8cf17cad23c8b887c85c4a2476eeec4d1ceae40e)) + +* Merge branch 'main' into bug/prepare-metadata ([`4b7ac04`](https://github.com/kaskada-ai/kaskada/commit/4b7ac045f9a033b6239a883659a9a7f41f4a618c)) + +* Merge pull request #37 from kaskada-ai/ulimit + +add ulimit instructions to README ([`4aa5b6f`](https://github.com/kaskada-ai/kaskada/commit/4aa5b6f7cd421beb52e504237ce685e4947e021a)) + +* Merge branch 'main' into feature/add-load-options ([`e97498a`](https://github.com/kaskada-ai/kaskada/commit/e97498a6bb87a1a852d1d3936bfcc3ee7cb4cfe2)) + +* added load_dataframe ([`97172a7`](https://github.com/kaskada-ai/kaskada/commit/97172a7803edacf0506624f50e8e3d1f26af718e)) + +* Merge branch 'main' into ulimit ([`c3bd543`](https://github.com/kaskada-ai/kaskada/commit/c3bd543cb8ee69f9637836b88d911373bbb7b2d2)) + +* fix send/sync problems ([`a928374`](https://github.com/kaskada-ai/kaskada/commit/a928374ae719e2d3898f7da72ede07257cff26bc)) + +* Merge branch 'main' into bug/prepare-metadata ([`058380e`](https://github.com/kaskada-ai/kaskada/commit/058380e512658949580c338bdb7612a4cbf3ec7f)) + +* update table reader ([`e3d92d9`](https://github.com/kaskada-ai/kaskada/commit/e3d92d95f91f7b86a9d94d3816eb7a42f3a1e424)) + +* Merge pull request #40 from 
kaskada-ai/clients/cli + +added the CLI client from the old repo ([`a610d84`](https://github.com/kaskada-ai/kaskada/commit/a610d84357e43536251cd3fa77504a86367dc241)) + +* setup cli ci ([`331795e`](https://github.com/kaskada-ai/kaskada/commit/331795e524d6cd65264fb0870e8e8449d9132227)) + +* fix wren CI ([`684c10d`](https://github.com/kaskada-ai/kaskada/commit/684c10ddba8efd0c1d0f60478ebe1cad1ba338c6)) + +* initial tests and fixes for the cli ([`d98f8e8`](https://github.com/kaskada-ai/kaskada/commit/d98f8e86d2863b43177b0971c33093880a10b93c)) + +* moved test helper code to shared folder ([`f593b87`](https://github.com/kaskada-ai/kaskada/commit/f593b8792d3dae016c1eb43d6e69a6547888716f)) + +* initial copy of code from old repo ([`deab2c2`](https://github.com/kaskada-ai/kaskada/commit/deab2c2d651e3340fc1107c0d0a274fdc421eeaa)) + +* Merge branch 'main' into bug/prepare-metadata ([`46270f8`](https://github.com/kaskada-ai/kaskada/commit/46270f89bc1a869e46ecc5df61d2aefd49f26c84)) + +* Merge pull request #47 from kaskada-ai/rm/docs-examples + +Reduce use of "weird" syntax features in the examples ([`d432fcf`](https://github.com/kaskada-ai/kaskada/commit/d432fcf418a662412af055a780557561a0b93e13)) + +* Merge pull request #46 from kaskada-ai/feature/add-python-readme-instructions + +Update Readme with Python Client Instructions ([`74a90c9`](https://github.com/kaskada-ai/kaskada/commit/74a90c91121cc049714e5775e68a24828aa91040)) + +* Merge branch 'main' into bug/prepare-metadata ([`85a9a33`](https://github.com/kaskada-ai/kaskada/commit/85a9a3361d28765628da54d746160d10d96fe127)) + +* Merge pull request #45 from kaskada-ai/feature/new-kaskada-repo + +Update Python Client to pull from Kaskada AI Repo ([`c988991`](https://github.com/kaskada-ai/kaskada/commit/c988991d8a04bd907e9427dafc3fdefedd6dbcb3)) + +* Reduce use of "weird" syntax features in the examples + +$input is unusual. There's also an easier way to explain the target +value that doesn't rely on time_of, which is also sort of unusual. 
([`ee802f5`](https://github.com/kaskada-ai/kaskada/commit/ee802f564f39a6f5efbb04dee86415546a7350ae)) + +* added examples and fixed formatting ([`9689e22`](https://github.com/kaskada-ai/kaskada/commit/9689e225823fcc56c70100d8a6801d51c133c48f)) + +* updated readme ([`976fc81`](https://github.com/kaskada-ai/kaskada/commit/976fc81f6c73e5b53c913766dd91ab9512fd1c6f)) + +* updated examples ([`65b212f`](https://github.com/kaskada-ai/kaskada/commit/65b212f1b0539f92df027e7b9b160aa392af6c68)) + +* Merge branch 'main' into bug/prepare-metadata ([`bcb5fa5`](https://github.com/kaskada-ai/kaskada/commit/bcb5fa55f8e9ec7392e0b0cd0e8855e3a41e9202)) + +* Merge pull request #44 from kaskada-ai/discussion-permission + +add discussion permission ([`8f25e60`](https://github.com/kaskada-ai/kaskada/commit/8f25e608215d32f16aa07a6de1adcce7373cb266)) + +* add discussion permission ([`ad18379`](https://github.com/kaskada-ai/kaskada/commit/ad1837981d277a3ff97986c7ac149f7aeadfe580)) + +* Merge pull request #41 from kaskada-ai/fix-github-action-if-syntax + +build: try fixing the github action syntax ([`6fc9b30`](https://github.com/kaskada-ai/kaskada/commit/6fc9b30f1ea353aadf0fb80ad1871d22e795db13)) + +* Merge pull request #38 from kaskada-ai/rm/docs + +Rm/docs ([`195ae88`](https://github.com/kaskada-ai/kaskada/commit/195ae8861e14e45523ff8fe95e97065bb97b24f9)) + +* Fix table headers ([`2212eb9`](https://github.com/kaskada-ai/kaskada/commit/2212eb9d8c146ed7935fbbe088d724d8a033e8c1)) + +* More comments ([`975228d`](https://github.com/kaskada-ai/kaskada/commit/975228d66032545e7c8713d15b8037c73daaa623)) + +* Edits per PR ([`cbbbc4c`](https://github.com/kaskada-ai/kaskada/commit/cbbbc4c03b56b71f3824bf38a2ebc884c5340466)) + +* Edit the text down some, and add tables showing results ([`ee2d10f`](https://github.com/kaskada-ai/kaskada/commit/ee2d10f1f601503f703e97e7c1f7f7dd91aa7102)) + +* Streamline overview ([`bd3fc55`](https://github.com/kaskada-ai/kaskada/commit/bd3fc55c6c971a6b5574f3cd691c26da113ee453)) + +* Merge pull request #39 from kaskada-ai/docker-permissions + +build: add permissions to push to ghcr.io ([`e8b1fb6`](https://github.com/kaskada-ai/kaskada/commit/e8b1fb68931157e9c5a18808c58291f88567c258)) + +* add ulimit instructions to README ([`b7eddda`](https://github.com/kaskada-ai/kaskada/commit/b7eddda42bc2f3077f89ba014c5c23031a5f55e4)) + +* Merge pull request #36 from kaskada-ai/build-docker-login + +docker login ([`c4767c0`](https://github.com/kaskada-ai/kaskada/commit/c4767c0c6440133d996e5fbe1a54e7736d0c42dc)) + +* docker login ([`63ba502`](https://github.com/kaskada-ai/kaskada/commit/63ba5029542e0f88169221170c707ccd11e02970)) + +* Merge pull request #35 from kaskada-ai/rm/legal + +Minor changes to license file and README updates ([`43295d4`](https://github.com/kaskada-ai/kaskada/commit/43295d494393e862c41d94b72a597471a13b1ed3)) + +* Rename API -> Manager ([`899ec58`](https://github.com/kaskada-ai/kaskada/commit/899ec58182425f52e9b3cff6f4e9c04ee918d0d6)) + +* Minor changes to license file and README updates ([`ac4e185`](https://github.com/kaskada-ai/kaskada/commit/ac4e18525798b35ea09a422286d3ebed6960ea8f)) + +* Merge pull request #34 from kaskada-ai/fix-permissions + +build: add package permissions to release ([`8ef3d9e`](https://github.com/kaskada-ai/kaskada/commit/8ef3d9e743be4d357fdb02005f06bbd56d45c914)) + +* Merge pull request #33 from kaskada-ai/build-dockerfile + +build: leave the assets where they are ([`527708e`](https://github.com/kaskada-ai/kaskada/commit/527708ec885cc5a3077da6a10b8656b53f8ad4ae)) + +* 
Merge pull request #32 from kaskada-ai/fix-dockerfile + +build: fix dockerfile ([`c6835bf`](https://github.com/kaskada-ai/kaskada/commit/c6835bf8e679042697ff6159ef3a1ffa62ec74dc)) + +* Merge pull request #31 from kaskada-ai/fix-placeholder + +build: variable references ([`1613fcf`](https://github.com/kaskada-ai/kaskada/commit/1613fcf73392832d4b2ce9de3b6b44a3b61d2537)) + +* Merge pull request #30 from kaskada-ai/release-drafter-concurrency + +don't label PRs during releases ([`370d135`](https://github.com/kaskada-ai/kaskada/commit/370d135f4d1e3cc6c6d94165f7a22ae222a6c2a6)) + +* don't label PRs during releases ([`914d694`](https://github.com/kaskada-ai/kaskada/commit/914d694acd1749a9f7d92b167d9a92e8c202a0f8)) + +* Merge pull request #25 from kaskada-ai/rm/docs + +Update getting started to be closer to the slide talk track. ([`f6a7346`](https://github.com/kaskada-ai/kaskada/commit/f6a734662b60dc86ca9569eca8048922ee2f2eb1)) + +* Merge branch 'main' into bug/prepare-metadata ([`0d77332`](https://github.com/kaskada-ai/kaskada/commit/0d773323271f670ab642863290859586744dfbc5)) + +* added changes ([`dee05a2`](https://github.com/kaskada-ai/kaskada/commit/dee05a2584c3388deb3b3bf9ba2e321907ed900a)) + +* Merge pull request #28 from kaskada-ai/release-drafter-fix + +build: work on release drafter ([`27568b2`](https://github.com/kaskada-ai/kaskada/commit/27568b219df323b58d18425fd4197aebaea6952e)) + +* Merge pull request #27 from kaskada-ai/fix-tag-computation + +build: fix tag computation ([`e6b8c6e`](https://github.com/kaskada-ai/kaskada/commit/e6b8c6effec06725042aa0d2ade2138b96194837)) + +* Merge pull request #26 from kaskada-ai/release-drafter-commitish + +build: set commitish to the sha ([`796af33`](https://github.com/kaskada-ai/kaskada/commit/796af33b5ff2739cad16a55656e3fb8a7d427440)) + +* Update getting started to be closer to the slide talk track. + +This still needs work, but I thought I'd do this incrementally as I have +time.
([`5499bf4`](https://github.com/kaskada-ai/kaskada/commit/5499bf407e319f46ddeb61187d96ccfe89f6110c)) + +* Merge pull request #24 from kaskada-ai/release-draft-tag + +build: specify the tag correctly ([`1c91325`](https://github.com/kaskada-ai/kaskada/commit/1c91325bdc11572490c8f2e9c622ca91e171cea8)) + +* Merge pull request #23 from kaskada-ai/feature/example-updates + +Example Notebook Updates ([`3e8f688`](https://github.com/kaskada-ai/kaskada/commit/3e8f6889f9f1cdb2eaf1cff6e98771305abf44d0)) + +* Merge pull request #22 from kaskada-ai/docker-current-time + +build: determine current date ([`ff606b6`](https://github.com/kaskada-ai/kaskada/commit/ff606b655d4a6b7a7b37461547c131795bca781a)) + +* Merge branch 'main' into feature/example-updates ([`4505220`](https://github.com/kaskada-ai/kaskada/commit/4505220d5330fee5e94a47a2264e8bfe8341a91b)) + +* added new requirements ([`fb84da9`](https://github.com/kaskada-ai/kaskada/commit/fb84da9397547d50486ab46df1bdc1d312634a27)) + +* removed temp changes ([`988249a`](https://github.com/kaskada-ai/kaskada/commit/988249a9f1424dc1814dad4b535dd772939e530a)) + +* updated examples ([`86e98b5`](https://github.com/kaskada-ai/kaskada/commit/86e98b52284171a8d8cc13ac77928ba2cedc672b)) + +* Merge pull request #21 from kaskada-ai/release-docker + +build: attempt to fix docker build ([`33beff1`](https://github.com/kaskada-ai/kaskada/commit/33beff1e4cbd790d1cbee3aa618ba94596abee13)) + +* comments ([`0c0df2f`](https://github.com/kaskada-ai/kaskada/commit/0c0df2f13978931ae7fe643e9de7611d9373866d)) + +* comment ([`aca01d9`](https://github.com/kaskada-ai/kaskada/commit/aca01d905156520add793863c6839ea9e170570c)) + +* Merge pull request #14 from kaskada-ai/feature/more-python-fixes + +Fenlmagic Rendering Fix ([`bab6986`](https://github.com/kaskada-ai/kaskada/commit/bab6986d8fb94110a75f6da084f5788c272b95e9)) + +* Merge pull request #20 from kaskada-ai/fix-engine-release-wren + +build: fix relative paths ([`d2c85be`](https://github.com/kaskada-ai/kaskada/commit/d2c85bedf5ee2fe9eb63988c589051bb559ce761)) + +* Merge pull request #19 from kaskada-ai/engine-release-go-install + +build: change cache-dependency-path ([`d14ef2d`](https://github.com/kaskada-ai/kaskada/commit/d14ef2d1a2977736aba566140c7183f81099676b)) + +* Merge pull request #18 from kaskada-ai/release-drafter-2 + +build: remove invalid value ([`4c15a34`](https://github.com/kaskada-ai/kaskada/commit/4c15a341f0bfea536fbd899ac5818bd8c7436cf1)) + +* Merge pull request #15 from kaskada-ai/release-drafter + +build: Fix release drafter ([`2917a19`](https://github.com/kaskada-ai/kaskada/commit/2917a19b666ac0c7c9d485b7343f905ffba5902e)) + +* Merge branch 'main' into feature/more-python-fixes ([`d394ec2`](https://github.com/kaskada-ai/kaskada/commit/d394ec2a209d9a78e0d110441f49a3eab6372a5f)) + +* Merge branch 'feature/more-python-fixes' of github.com:kaskada-ai/kaskada into feature/more-python-fixes ([`6d2c526`](https://github.com/kaskada-ai/kaskada/commit/6d2c526adb4670973e33cb21191e585ec853423e)) + +* code review comments ([`f045d31`](https://github.com/kaskada-ai/kaskada/commit/f045d3119a9ce6eecf1d1189190cdb3b7c660682)) + +* Merge branch 'main' into feature/more-python-fixes ([`bcbba5c`](https://github.com/kaskada-ai/kaskada/commit/bcbba5cb9f09274e243ea9159fe045fc9c5c5f7a)) + +* Merge pull request #11 from kaskada-ai/cargo-lock + +update Cargo.lock for 0.1.1 release ([`be2b653`](https://github.com/kaskada-ai/kaskada/commit/be2b65358471bad811e54f8b86331d702020c640)) + +* Merge pull request #12 from kaskada-ai/go/workspace + 
+split golang code into multiple modules ([`3f3a7b9`](https://github.com/kaskada-ai/kaskada/commit/3f3a7b9ac219bf4b480d6742dca9f24398470662)) + +* fixed integration tests ([`552359e`](https://github.com/kaskada-ai/kaskada/commit/552359e0e7f43df26c860f333e62a007edbc28c8)) + +* fix build ([`6fbb3d2`](https://github.com/kaskada-ai/kaskada/commit/6fbb3d26ad9febcd16623f374d260b6d9f92d81f)) + +* fixed module pathing ([`322313a`](https://github.com/kaskada-ai/kaskada/commit/322313aaf452306dbe3e3db7a82ff485da6f65c8)) + +* updated build and test paths ([`76ccb14`](https://github.com/kaskada-ai/kaskada/commit/76ccb14057a75bc3b31860a7ccbf8ec080ec0fa0)) + +* created new golang modules and updated all paths ([`de3f84c`](https://github.com/kaskada-ai/kaskada/commit/de3f84c87998c39f7064c59d5f751b0cf4deb5ee)) + +* update Cargo.lock for 0.1.1 release ([`ce75c26`](https://github.com/kaskada-ai/kaskada/commit/ce75c26081d5c1dd62b09685dbb1cf5157afc8ed)) + +* added new version ([`f1f363f`](https://github.com/kaskada-ai/kaskada/commit/f1f363fb5b5afca50b35da57bc16445d387b5fa1)) + +* updated fenlmagic ([`362f3bb`](https://github.com/kaskada-ai/kaskada/commit/362f3bb8b0a78a447f0706d165af00e0cedfd5f9)) + +* added small fixes ([`b01f688`](https://github.com/kaskada-ai/kaskada/commit/b01f688f13b139569267f12e5959e91a4c2e7513)) + +* Merge pull request #6 from kaskada-ai/kevinjnguyen-patch-1 + +Update the repo link in release process doc ([`831355b`](https://github.com/kaskada-ai/kaskada/commit/831355b5df7228e320eee662f1c475bc59e20a09)) + +* Merge pull request #8 from kaskada-ai/feature/output-csv + +Update Fenlmagic with output parameter ([`aa452e9`](https://github.com/kaskada-ai/kaskada/commit/aa452e92f6bb465e94ae53878a1aec7bd5d2eb14)) + +* Merge pull request #9 from kaskada-ai/tsk-docs-remove-collab + +docs: comment out non-working collab tutorials ([`b4547f5`](https://github.com/kaskada-ai/kaskada/commit/b4547f58ca5afe6efdbe12ae4b7fcda6a06fae69)) + +* Merge pull request #5 from kaskada-ai/tsk-docs-kaskada-setup + +docs: kaskada in jupyter quick setup ([`f8861f3`](https://github.com/kaskada-ai/kaskada/commit/f8861f34ae31428ddef9dd063f0ef7c3a0900b29)) + +* Merge pull request #7 from kaskada-ai/tsk-release-fix + +trying expression in release.yml ([`992794a`](https://github.com/kaskada-ai/kaskada/commit/992794a8792574ace8575e7a8bacc7c5718173b0)) + +* Merge branch 'main' into feature/output-csv ([`8f40569`](https://github.com/kaskada-ai/kaskada/commit/8f4056912aaf979cc01df1360f613fe588cc2f67)) + +* added csv notebook ([`24809c5`](https://github.com/kaskada-ai/kaskada/commit/24809c57415a38fab09835de656db343ba916c4b)) + +* added fenlmagic ([`f70c624`](https://github.com/kaskada-ai/kaskada/commit/f70c624d57bc23d798a0abfacd324f8a1ac6dd44)) + +* trying expression in release.yml ([`9909ce0`](https://github.com/kaskada-ai/kaskada/commit/9909ce029ed7c6768e32d3402700a957047d062f)) + +* Merge pull request #4 from kaskada-ai/release/0.1.1 + +Kaskada Release 0.1.1 ([`3bfb078`](https://github.com/kaskada-ai/kaskada/commit/3bfb078279ec7af70e1724c9e82fc4f5769cdabe)) + +* Update the repo link in release process doc ([`0427ed7`](https://github.com/kaskada-ai/kaskada/commit/0427ed7d7d22b0117a94f913223d15e34adb703b)) + +* address comments ([`a4c3492`](https://github.com/kaskada-ai/kaskada/commit/a4c3492bf23415c17631898fa15f610620c45528)) + +* Merge branch 'main' into release/0.1.1 ([`b842053`](https://github.com/kaskada-ai/kaskada/commit/b842053eea929241e2ad03e02c2d05e1dac4c6b6)) + +* code review comments 
([`a9d2ab6`](https://github.com/kaskada-ai/kaskada/commit/a9d2ab63722c6f4d126342b8c1ed83ed3375d1a4)) + +* Merge pull request #3 from kaskada-ai/feature/python-schema-fix + +Schema Rendering Fix for Python Client ([`c64217a`](https://github.com/kaskada-ai/kaskada/commit/c64217a66ee285a0c94b7d67fe3f62114d6fb4b3)) + +* release version bump ([`45219dd`](https://github.com/kaskada-ai/kaskada/commit/45219dd2ed8f68f4d9752d132be58336e0e8ef70)) + +* add schema fix ([`620104f`](https://github.com/kaskada-ai/kaskada/commit/620104f696f123c86e91e4bec86def1ac2ad7dfa)) + +* Merge pull request #1 from kaskada-ai/feature/python-client-logging + +Add Python Debug Logging ([`3dff155`](https://github.com/kaskada-ai/kaskada/commit/3dff155516c0946561045a4c78722a6e3eaace20)) + +* added example notebook ([`4f8a754`](https://github.com/kaskada-ai/kaskada/commit/4f8a75414e2a36ff5a3456afae17a6faf2ac6606)) + +* added logging changes ([`3634d40`](https://github.com/kaskada-ai/kaskada/commit/3634d4014a562d111a7cbb0627772e46004fd51d)) + +* seeding kaskada repo ([`00bad4e`](https://github.com/kaskada-ai/kaskada/commit/00bad4e9bd015252fedff27f6ff4c6538fa2ff23)) diff --git a/python/Cargo.lock b/python/Cargo.lock new file mode 100644 index 000000000..689b222f6 --- /dev/null +++ b/python/Cargo.lock @@ -0,0 +1,5038 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addr2line" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "adler32" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 0.2.10", + "once_cell", + "version_check", +] + +[[package]] +name = "ahash" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +dependencies = [ + "cfg-if", + "const-random", + "getrandom 0.2.10", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + +[[package]] +name = "android-tzdata" +version = "0.1.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is-terminal", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" + +[[package]] +name = "anstyle-parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "anstyle-wincon" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +dependencies = [ + "anstyle", + "windows-sys", +] + +[[package]] +name = "anyhow" +version = "1.0.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" +dependencies = [ + "backtrace", +] + +[[package]] +name = "approx" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0e60b75072ecd4168020818c0107f2857bb6c4e64252d8d3983f6263b40a5c3" +dependencies = [ + "num-traits", +] + +[[package]] +name = "arrow" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2feeebd77b34b0bc88f224e06d01c27da4733997cc4789a4e056196656cdc59a" +dependencies = [ + "ahash 0.8.3", + "arrow-arith", + "arrow-array", + "arrow-buffer", + "arrow-cast", + "arrow-csv", + "arrow-data", + "arrow-ipc", + "arrow-json", + "arrow-ord", + "arrow-row", + "arrow-schema", + "arrow-select", + "arrow-string", + "pyo3", +] + +[[package]] +name = "arrow-arith" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7173f5dc49c0ecb5135f52565af33afd3fdc9a12d13bd6f9973e8b96305e4b2e" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "chrono", + "half", + "num", +] + +[[package]] +name = "arrow-array" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63d7ea725f7d1f8bb2cffc53ef538557e95fc802e217d5be25122d402e22f3d0" +dependencies = [ + "ahash 0.8.3", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "chrono", + "half", + "hashbrown 0.14.0", + "num", +] + +[[package]] +name = "arrow-buffer" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdbe439e077f484e5000b9e1d47b5e4c0d15f2b311a8f5bcc682553d5d67a722" +dependencies = [ + "half", + "num", +] + +[[package]] +name = "arrow-cast" +version = "43.0.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93913cc14875770aa1eef5e310765e855effa352c094cb1c7c00607d0f37b4e1" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "arrow-select", + "chrono", + "half", + "lexical-core", + "num", +] + +[[package]] +name = "arrow-csv" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef55b67c55ed877e6fe7b923121c19dae5e31ca70249ea2779a17b58fb0fbd9a" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-cast", + "arrow-data", + "arrow-schema", + "chrono", + "csv", + "csv-core", + "lazy_static", + "lexical-core", + "regex", +] + +[[package]] +name = "arrow-data" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4f4f4a3c54614126a71ab91f6631c9743eb4643d6e9318b74191da9dc6e028b" +dependencies = [ + "arrow-buffer", + "arrow-schema", + "half", + "num", +] + +[[package]] +name = "arrow-ipc" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d41a3659f984a524ef1c2981d43747b24d8eec78e2425267fcd0ef34ce71cd18" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-cast", + "arrow-data", + "arrow-schema", + "flatbuffers", +] + +[[package]] +name = "arrow-json" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10b95faa95a378f56ef32d84cc0104ea998c39ef7cd1faaa6b4cebf8ea92846d" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-cast", + "arrow-data", + "arrow-schema", + "chrono", + "half", + "indexmap 2.0.0", + "lexical-core", + "num", + "serde", + "serde_json", +] + +[[package]] +name = "arrow-ord" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c68549a4284d9f8b39586afb8d5ff8158b8f0286353a4844deb1d11cf1ba1f26" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "arrow-select", + "half", + "num", +] + +[[package]] +name = "arrow-row" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a75a4a757afc301ce010adadff54d79d66140c4282ed3de565f6ccb716a5cf3" +dependencies = [ + "ahash 0.8.3", + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "half", + "hashbrown 0.14.0", +] + +[[package]] +name = "arrow-schema" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bebcb57eef570b15afbcf2d07d813eb476fde9f6dd69c81004d6476c197e87e" +dependencies = [ + "bitflags 2.3.3", + "serde", +] + +[[package]] +name = "arrow-select" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6e2943fa433a48921e914417173816af64eef61c0a3d448280e6c40a62df221" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "num", +] + +[[package]] +name = "arrow-string" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbc92ed638851774f6d7af1ad900b92bc1486746497511868b4298fcbcfa35af" +dependencies = [ + "arrow-array", + "arrow-buffer", + "arrow-data", + "arrow-schema", + "arrow-select", + "num", + "regex", + "regex-syntax 0.7.4", +] + +[[package]] +name = "ascii-canvas" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" +dependencies = [ + "term", +] + +[[package]] +name = "async-attributes" +version 
= "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "async-executor" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fa3dc5f2a8564f07759c008b9109dc0d39de92a88d5588b8a5036d286383afb" +dependencies = [ + "async-lock", + "async-task", + "concurrent-queue", + "fastrand 1.9.0", + "futures-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" +dependencies = [ + "async-channel", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" +dependencies = [ + "async-lock", + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-lite", + "log", + "parking", + "polling", + "rustix 0.37.23", + "slab", + "socket2", + "waker-fn", +] + +[[package]] +name = "async-lock" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7" +dependencies = [ + "event-listener", +] + +[[package]] +name = "async-native-tls" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d57d4cec3c647232e1094dc013546c0b33ce785d8aeb251e1f20dfaf8a9a13fe" +dependencies = [ + "futures-util", + "native-tls", + "thiserror", + "url", +] + +[[package]] +name = "async-once-cell" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9338790e78aa95a416786ec8389546c4b6a1dfc3dc36071ed9518a9413a542eb" + +[[package]] +name = "async-process" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9d28b1d97e08915212e2e45310d47854eafa69600756fc735fb788f75199c9" +dependencies = [ + "async-io", + "async-lock", + "autocfg", + "blocking", + "cfg-if", + "event-listener", + "futures-lite", + "rustix 0.37.23", + "signal-hook", + "windows-sys", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-attributes", + "async-channel", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name 
= "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "async-task" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc7ab41815b3c653ccd2978ec3255c81349336702dfdf62ee6f7069b12a3aae" + +[[package]] +name = "async-trait" +version = "0.1.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "asynchronous-codec" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568" +dependencies = [ + "bytes", + "futures-sink", + "futures-util", + "memchr", + "pin-project-lite", +] + +[[package]] +name = "atomic-waker" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1181e1e0d1fce796a03db1ae795d67167da795f9cf4a39c37589e85ef57f26d3" + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "avro-rs" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ece550dd6710221de9bcdc1697424d8eee4fc4ca7e017479ea9d50c348465e37" +dependencies = [ + "byteorder", + "digest 0.9.0", + "lazy_static", + "libflate", + "num-bigint 0.2.6", + "rand 0.7.3", + "serde", + "serde_json", + "strum 0.18.0", + "strum_macros 0.18.0", + "thiserror", + "typed-builder", + "uuid 0.8.2", + "zerocopy", +] + +[[package]] +name = "avro-schema" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5281855b39aba9684d2f47bf96983fbfd8f1725f12fabb0513a8ab879647bbd" +dependencies = [ + "fallible-streaming-iterator", + "serde", + "serde_json", +] + +[[package]] +name = "axum" +version = "0.6.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a1de45611fdb535bfde7b7de4fd54f4fd2b17b1737c0a59b69bf9b92074b8c" +dependencies = [ + "async-trait", + "axum-core", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + 
+[[package]] +name = "base64" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" + +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" + +[[package]] +name = "bigdecimal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "454bca3db10617b88b566f205ed190aedb0e0e6dd4cad61d3988a72e8c5594cb" +dependencies = [ + "autocfg", + "libm", + "num-bigint 0.4.3", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bindgen" +version = "0.65.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5" +dependencies = [ + "bitflags 1.3.2", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "peeking_take_while", + "prettyplease 0.2.12", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.27", +] + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "serde", + "tap", + "wyz", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blocking" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77231a1c8f801696fc0123ec6150ce92cffb8e164a02afb9c8ddee0e9b65ad65" +dependencies = [ + "async-channel", + "async-lock", + "async-task", + "atomic-waker", + "fastrand 1.9.0", + "futures-lite", + "log", +] + +[[package]] +name = "brotli" +version = "3.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +dependencies = [ + "alloc-no-stdlib", 
+ "alloc-stdlib", +] + +[[package]] +name = "bumpalo" +version = "3.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "bzip2-sys" +version = "0.1.11+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +dependencies = [ + "jobserver", +] + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cfg_aliases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" + +[[package]] +name = "chrono" +version = "0.4.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "time", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "chronoutil" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "154aa5253c981d51e9466afc1e9ce41631197837fd1c41ee931008f229b8a3d7" +dependencies = [ + "chrono", +] + +[[package]] +name = "clang-sys" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "4.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd304a20bff958a57f04c4e96a2e7594cc4490a0e809cbd48bb6437edaa452d" +dependencies = [ + "clap_builder", + "clap_derive", + "once_cell", +] + +[[package]] +name = "clap_builder" +version = "4.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01c6a3f08f1fe5662a35cfe393aec09c4df95f60ee93b7556505260f75eee9e1" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "clap_lex" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "concurrent-queue" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "const-random" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368a7a772ead6ce7e1de82bfb04c485f3db8ec744f72925af5735e29a22cc18e" +dependencies = [ + "const-random-macro", + "proc-macro-hack", +] + +[[package]] +name = "const-random-macro" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d7d6ab3c3a2282db210df5f02c4dab6e0a7057af0fb7ebd4070f30fe05c0ddb" +dependencies = [ + "getrandom 0.2.10", + "once_cell", + "proc-macro-hack", + "tiny-keccak", +] + +[[package]] +name = "const_format" +version = "0.2.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c990efc7a285731f9a4378d81aff2f0e85a2c8781a05ef0f8baa8dac54d0ff48" +dependencies = [ + "const_format_proc_macros", +] + +[[package]] +name = "const_format_proc_macros" +version = "0.2.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e026b6ce194a874cb9cf32cd5772d1ef9767cc8fcb5765948d74f37a9d8b2bf6" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" + +[[package]] +name = "cpu-time" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9e393a7668fe1fad3075085b86c781883000b4ede868f43627b34a87c8b7ded" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "cpufeatures" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "csv" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626ae34994d3d8d668f4269922248239db4ae42d538b14c398b74a52208e8086" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" +dependencies = [ + "memchr", +] + +[[package]] +name = "dashmap" +version = "5.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6943ae99c34386c84a470c499d3414f66502a41340aa895406e0d2e4a207b91d" +dependencies = [ + "cfg-if", + "hashbrown 0.14.0", + "lock_api", + "once_cell", + "parking_lot_core 0.9.8", +] + +[[package]] +name = "data-encoding" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" + +[[package]] +name = "decorum" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "281759d3c8a14f5c3f0c49363be56810fcd7f910422f97f2db850c2920fde5cf" +dependencies = [ + "approx", + "num-traits", + "serde", + "serde_derive", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 1.0.109", +] + +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "edit-distance" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbaaaf38131deb9ca518a274a45bfdb8771f139517b073b16c2d3d32ae5037b" + +[[package]] +name = "egg" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96beaf9d35dbc4686bc86a4ecb851fd6a406f0bf32d9f646b1225a5c5cf5b5d7" +dependencies = [ + "env_logger", + "fxhash", + "hashbrown 0.12.3", + "indexmap 1.9.3", + "instant", + "log", + "smallvec", + "symbol_table", + "symbolic_expressions", + "thiserror", +] + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" + +[[package]] +name = "ena" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1" +dependencies = [ + "log", +] + +[[package]] +name = "encoding_rs" +version = "0.8.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "enum-map" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "017b207acb4cc917f4c31758ed95c0bc63ddb0f358b22eb38f80a2b2a43f6b1f" +dependencies = [ + "enum-map-derive", +] + +[[package]] +name = "enum-map-derive" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8560b409800a72d2d7860f8e5f4e0b0bd22bea6a352ea2a9ce30ccdef7f16d2f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "env_logger" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" +dependencies = [ + "log", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "erased-serde" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da96524cc884f6558f1769b6c46686af2fe8e8b4cd253bd5a3cdba8181b8e070" +dependencies = [ + "serde", +] + +[[package]] +name = "errno" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "error-stack" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f00447f331c7f726db5b8532ebc9163519eed03c6d7c8b73c90b3ff5646ac85" +dependencies = [ + "anyhow", + "rustc_version", + "tracing-error", +] + +[[package]] +name = 
"event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "fallible-iterator" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" + +[[package]] +name = "fallible-streaming-iterator" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "fastrand" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "flatbuffers" +version = "23.5.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dac53e22462d78c16d64a1cd22371b54cc3fe94aa15e7886a2fa6e5d1ab8640" +dependencies = [ + "bitflags 1.3.2", + "rustc_version", +] + +[[package]] +name = "flate2" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.28" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" + +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" + +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand 1.9.0", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "futures-macro" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "futures-sink" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" + +[[package]] +name = "futures-task" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" + +[[package]] +name = "futures-util" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "gimli" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" + +[[package]] +name = "glob" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "h2" +version = "0.3.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 1.9.3", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "half" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc52e53916c08643f1b56ec082790d1e86a32e58dc5268f897f313fbae7b4872" +dependencies = [ + "cfg-if", + "crunchy", + "num-traits", + "serde", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash 0.7.6", +] + +[[package]] +name = "hashbrown" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +dependencies = [ + "ahash 0.8.3", + "allocator-api2", + "serde", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" + +[[package]] +name = "http" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +dependencies = [ + "bytes", + "futures-channel", + 
"futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +dependencies = [ + "futures-util", + "http", + "hyper", + "rustls", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", +] + +[[package]] +name = "indoc" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306" + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "integer-encoding" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bb03732005da905c88227371639bf1ad885cc712789c011c31c5fb3ab3ccf02" + +[[package]] +name = "inventory" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a53088c87cf71c9d4f3372a2cb9eea1e7b8a0b1bf8b7f7d23fe5b76dbb07e63b" + +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys", +] + 
+[[package]] +name = "ipnet" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" + +[[package]] +name = "is-terminal" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +dependencies = [ + "hermit-abi", + "rustix 0.38.4", + "windows-sys", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "jobserver" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "kaskada" +version = "0.6.0-a.1" +dependencies = [ + "arrow", + "derive_more", + "error-stack", + "futures", + "itertools 0.11.0", + "mimalloc", + "pyo3", + "pyo3-asyncio", + "sparrow-session", + "tokio", + "tracing", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "lalrpop" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da4081d44f4611b66c6dd725e6de3169f9f63905421e8626fcb86b6a898998b8" +dependencies = [ + "ascii-canvas", + "bit-set", + "diff", + "ena", + "is-terminal", + "itertools 0.10.5", + "lalrpop-util", + "petgraph", + "pico-args", + "regex", + "regex-syntax 0.7.4", + "string_cache", + "term", + "tiny-keccak", + "unicode-xid", +] + +[[package]] +name = "lalrpop-util" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f35c735096c0293d313e8f2a641627472b83d01b937177fe76e5e2708d31e0d" +dependencies = [ + "regex", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "lexical-core" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cde5de06e8d4c2faabc400238f9ae1c74d5412d03a7bd067645ccbc47070e46" +dependencies = [ + "lexical-parse-float", + "lexical-parse-integer", + "lexical-util", + "lexical-write-float", + "lexical-write-integer", +] + +[[package]] +name = "lexical-parse-float" +version 
= "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683b3a5ebd0130b8fb52ba0bdc718cc56815b6a097e28ae5a6997d0ad17dc05f" +dependencies = [ + "lexical-parse-integer", + "lexical-util", + "static_assertions", +] + +[[package]] +name = "lexical-parse-integer" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d0994485ed0c312f6d965766754ea177d07f9c00c9b82a5ee62ed5b47945ee9" +dependencies = [ + "lexical-util", + "static_assertions", +] + +[[package]] +name = "lexical-util" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5255b9ff16ff898710eb9eb63cb39248ea8a5bb036bea8085b1a767ff6c4e3fc" +dependencies = [ + "static_assertions", +] + +[[package]] +name = "lexical-write-float" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accabaa1c4581f05a3923d1b4cfd124c329352288b7b9da09e766b0668116862" +dependencies = [ + "lexical-util", + "lexical-write-integer", + "static_assertions", +] + +[[package]] +name = "lexical-write-integer" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b6f3d1f4422866b68192d62f77bc5c700bee84f3069f2469d7bc8c77852446" +dependencies = [ + "lexical-util", + "static_assertions", +] + +[[package]] +name = "libc" +version = "0.2.147" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" + +[[package]] +name = "libflate" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ff4ae71b685bbad2f2f391fe74f6b7659a34871c08b210fdc039e43bee07d18" +dependencies = [ + "adler32", + "crc32fast", + "libflate_lz77", +] + +[[package]] +name = "libflate_lz77" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a52d3a8bfc85f250440e4424db7d857e241a3aebbbe301f3eb606ab15c39acbf" +dependencies = [ + "rle-decode-fast", +] + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if", + "winapi", +] + +[[package]] +name = "libm" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" + +[[package]] +name = "libmimalloc-sys" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4ac0e912c8ef1b735e92369695618dc5b1819f5a7bf3f167301a3ba1cea515e" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "librocksdb-sys" +version = "0.11.0+8.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3386f101bcb4bd252d8e9d2fb41ec3b0862a15a62b478c355b2982efa469e3e" +dependencies = [ + "bindgen", + "bzip2-sys", + "cc", + "glob", + "libc", + "libz-sys", + "lz4-sys", +] + +[[package]] +name = "libz-sys" +version = "1.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + +[[package]] +name = "linux-raw-sys" +version = 
"0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" + +[[package]] +name = "lock_api" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" +dependencies = [ + "value-bag", +] + +[[package]] +name = "logos" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf8b031682c67a8e3d5446840f9573eb7fe26efe7ec8d195c9ac4c0647c502f1" +dependencies = [ + "logos-derive", +] + +[[package]] +name = "logos-derive" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d849148dbaf9661a6151d1ca82b13bb4c4c128146a88d05253b38d4e2f496c" +dependencies = [ + "beef", + "fnv", + "proc-macro2", + "quote", + "regex-syntax 0.6.29", + "syn 1.0.109", +] + +[[package]] +name = "lz4" +version = "1.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e9e2dd86df36ce760a60f6ff6ad526f7ba1f14ba0356f8254fb6905e6494df1" +dependencies = [ + "libc", + "lz4-sys", +] + +[[package]] +name = "lz4-sys" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d27b317e207b10f69f5e75494119e391a96f48861ae870d1da6edac98ca900" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "matchit" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mimalloc" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e2894987a3459f3ffb755608bd82188f8ed00d0ae077f1edea29c068d639d98" +dependencies = [ + "libmimalloc-sys", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +dependencies = [ + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "num" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05180d69e3da0e530ba2a1dae5110317e49e3b7f3d41be227dc5f92e49ee7af" +dependencies = [ + "num-bigint 0.4.3", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-complex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg", + "num-bigint 0.4.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" +dependencies = [ + "memchr", +] + 
+[[package]] +name = "object_store" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27c776db4f332b571958444982ff641d2531417a326ca368995073b639205d58" +dependencies = [ + "async-trait", + "base64 0.21.2", + "bytes", + "chrono", + "futures", + "humantime", + "hyper", + "itertools 0.10.5", + "parking_lot 0.12.1", + "percent-encoding", + "quick-xml", + "rand 0.8.5", + "reqwest", + "ring", + "rustls-pemfile", + "serde", + "serde_json", + "snafu", + "tokio", + "tracing", + "url", + "walkdir", +] + +[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "openssl" +version = "0.10.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-src" +version = "111.26.0+1.1.1u" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efc62c9f12b22b8f5208c23a7200a442b2e5999f8bdf80233852122b5a4f6f37" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "ordered-float" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87" +dependencies = [ + "num-traits", +] + +[[package]] +name = "owning_ref" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ff55baddef9e4ad00f88b6c743a2a8062d4c6ade126c2a528644b8e444d52ce" +dependencies = [ + "stable_deref_trait", +] + +[[package]] +name = "parking" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.8", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if", + "instant", + 
"libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.3.5", + "smallvec", + "windows-targets", +] + +[[package]] +name = "parquet" +version = "43.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec7267a9607c3f955d4d0ac41b88a67cecc0d8d009173ad3da390699a6cb3750" +dependencies = [ + "ahash 0.8.3", + "arrow-array", + "arrow-buffer", + "arrow-cast", + "arrow-data", + "arrow-ipc", + "arrow-schema", + "arrow-select", + "base64 0.21.2", + "brotli", + "bytes", + "chrono", + "flate2", + "futures", + "hashbrown 0.14.0", + "lz4", + "num", + "num-bigint 0.4.3", + "paste", + "seq-macro", + "snap", + "thrift", + "tokio", + "twox-hash", + "zstd", +] + +[[package]] +name = "parse-display" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6509d08722b53e8dafe97f2027b22ccbe3a5db83cb352931e9716b0aa44bc5c" +dependencies = [ + "once_cell", + "parse-display-derive", + "regex", +] + +[[package]] +name = "parse-display-derive" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68517892c8daf78da08c0db777fcc17e07f2f63ef70041718f8a7630ad84f341" +dependencies = [ + "once_cell", + "proc-macro2", + "quote", + "regex", + "regex-syntax 0.7.4", + "structmeta", + "syn 2.0.27", +] + +[[package]] +name = "paste" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "pem" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" +dependencies = [ + "base64 0.13.1", +] + +[[package]] +name = "percent-encoding" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" + +[[package]] +name = "petgraph" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +dependencies = [ + "fixedbitset", + "indexmap 1.9.3", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pico-args" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + +[[package]] +name = "pin-project" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "030ad2bc4db10a8944cb0d837f158bdfec4d4a4873ab701a95046770d11f8842" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" + +[[package]] +name = "polling" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if", + "concurrent-queue", + "libc", + "log", + "pin-project-lite", + "windows-sys", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "prettyplease" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" +dependencies = [ + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "prettyplease" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c64d9ba0963cdcea2e1b2230fbae2bab30eb25a174be395c41e764bfb65dd62" +dependencies = [ + "proc-macro2", + "syn 2.0.27", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "1.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "proptest" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e35c06b98bf36aba164cc17cb25f7e232f5c4aeea73baa14b8a9f0d92dbfa65" +dependencies = [ + "bit-set", + "bitflags 1.3.2", + "byteorder", + "lazy_static", + "num-traits", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rand_xorshift", + "regex-syntax 0.6.29", + "rusty-fork", + "tempfile", + "unarray", +] + +[[package]] +name = "prost" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" +dependencies = [ + "bytes", + "heck 0.4.1", + "itertools 0.10.5", + "lazy_static", + "log", + "multimap", + "petgraph", + "prettyplease 0.1.25", + "prost", + "prost-types", + "regex", + "syn 1.0.109", + "tempfile", + 
"which", +] + +[[package]] +name = "prost-derive" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +dependencies = [ + "anyhow", + "itertools 0.10.5", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "prost-types" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" +dependencies = [ + "prost", +] + +[[package]] +name = "prost-wkt" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562788060bcf2bfabe055194bd991ed2442457661744c88e0a0828ff9a08c08b" +dependencies = [ + "chrono", + "inventory", + "prost", + "serde", + "serde_derive", + "serde_json", + "typetag", +] + +[[package]] +name = "prost-wkt-build" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4dca8bcead3b728a6a7da017cc95e7f4cb2320ec4f6896bc593a1c4700f7328" +dependencies = [ + "heck 0.4.1", + "prost", + "prost-build", + "prost-types", + "quote", +] + +[[package]] +name = "prost-wkt-types" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2377c5680f2342871823045052e791b4487f7c90aae17e0feaee24cf59578a34" +dependencies = [ + "chrono", + "prost", + "prost-build", + "prost-types", + "prost-wkt", + "prost-wkt-build", + "regex", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "pulsar" +version = "5.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20f237570b5665b38c7d5228f9a1d2990e369c00e635704528996bcd5219f540" +dependencies = [ + "async-native-tls", + "async-std", + "async-trait", + "asynchronous-codec", + "bit-vec", + "bytes", + "chrono", + "crc", + "futures", + "futures-io", + "futures-timer", + "log", + "lz4", + "native-tls", + "nom", + "pem", + "prost", + "prost-build", + "prost-derive", + "rand 0.8.5", + "regex", + "tokio", + "tokio-native-tls", + "tokio-util", + "url", + "uuid 1.4.1", +] + +[[package]] +name = "pyo3" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffb88ae05f306b4bfcde40ac4a51dc0b05936a9207a4b75b798c7729c4258a59" +dependencies = [ + "cfg-if", + "indoc", + "libc", + "memoffset", + "parking_lot 0.12.1", + "pyo3-build-config", + "pyo3-ffi", + "pyo3-macros", + "unindent", +] + +[[package]] +name = "pyo3-asyncio" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2cc34c1f907ca090d7add03dc523acdd91f3a4dab12286604951e2f5152edad" +dependencies = [ + "futures", + "once_cell", + "pin-project-lite", + "pyo3", + "tokio", +] + +[[package]] +name = "pyo3-build-config" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "554db24f0b3c180a9c0b1268f91287ab3f17c162e15b54caaae5a6b3773396b0" +dependencies = [ + "once_cell", + "python3-dll-a", + "target-lexicon", +] + +[[package]] +name = "pyo3-ffi" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "922ede8759e8600ad4da3195ae41259654b9c55da4f7eec84a0ccc7d067a70a4" +dependencies = [ + "libc", + "pyo3-build-config", +] + +[[package]] +name = "pyo3-macros" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a5caec6a1dd355964a841fcbeeb1b89fe4146c87295573f94228911af3cc5a2" +dependencies = [ + "proc-macro2", + 
"pyo3-macros-backend", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pyo3-macros-backend" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0b78ccbb160db1556cdb6fd96c50334c5d4ec44dc5e0a968d0a1208fa0efa8b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "python3-dll-a" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f07cd4412be8fa09a721d40007c483981bbe072cd6a21f2e83e04ec8f8343f" +dependencies = [ + "cc", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quick-xml" +version = "0.28.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce5e73202a820a31f8a0ee32ada5e21029c81fd9e3ebf668a40832e4219d9d1" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "quote" +version = "1.0.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.10", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_xorshift" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +dependencies = [ + "rand_core 0.6.4", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_users" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +dependencies = [ + "getrandom 0.2.10", + "redox_syscall 0.2.16", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax 0.7.4", +] + +[[package]] +name = "regex-automata" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.7.4", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" + +[[package]] +name = "reqwest" +version = "0.11.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55" +dependencies = [ + "base64 0.21.2", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "rle-decode-fast" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" + +[[package]] +name = "rocksdb" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb6f170a4041d50a0ce04b0d2e14916d6ca863ea2e422689a5b694395d299ffe" +dependencies = [ + "libc", + "librocksdb-sys", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.37.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.3.8", + "windows-sys", +] + +[[package]] +name = "rustix" +version = "0.38.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" +dependencies = [ + "bitflags 2.3.3", + "errno", + "libc", + "linux-raw-sys 0.4.3", + "windows-sys", +] + +[[package]] +name = "rustls" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +dependencies = [ + "base64 0.21.2", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "513722fd73ad80a71f72b61009ea1b584bcfa1483ca93949c8f290298837fa59" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + 
"core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" + +[[package]] +name = "seq-macro" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4" + +[[package]] +name = "serde" +version = "1.0.176" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76dc28c9523c5d70816e393136b86d48909cfb27cecaa902d338c19ed47164dc" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.176" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e7b8c5dc823e3b90651ff1d3808419cd14e5ad76de04feaf37da114e7a306f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "serde_json" +version = "1.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_yaml" +version = "0.9.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" +dependencies = [ + "indexmap 2.0.0", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sha2" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" + +[[package]] +name = "signal-hook" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "siphasher" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" + +[[package]] +name = "slab" +version = "0.4.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" +dependencies = [ + "serde", +] + +[[package]] +name = "snafu" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4de37ad025c587a29e8f3f5605c00f70b98715ef90b9061a815b9e59e9042d6" +dependencies = [ + "doc-comment", + "snafu-derive", +] + +[[package]] +name = "snafu-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990079665f075b699031e9c08fd3ab99be5029b96f3b78dc0709e8f77e4efebf" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "snap" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e9f0ab6ef7eb7353d9119c170a436d1bf248eea575ac42d19d12f4e34130831" + +[[package]] +name = "socket2" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "sparrow-api" +version = "0.11.0" +dependencies = [ + "anyhow", + "arrow", + "chrono", + "clap", + "decorum", + "derive_more", + "enum-map", + "error-stack", + "itertools 0.11.0", + "prost", + "prost-build", + "prost-types", + "prost-wkt", + "prost-wkt-build", + "prost-wkt-types", + "serde", + "serde_yaml", + "sparrow-arrow", + "sparrow-syntax", + "tempfile", + "thiserror", + "tokio", + "tonic", + "tonic-build", + "uuid 1.4.1", +] + +[[package]] +name = "sparrow-arrow" +version = "0.11.0" +dependencies = [ + "ahash 0.8.3", + "anyhow", + "arrow", + "arrow-array", + "arrow-buffer", + "arrow-schema", + "arrow-select", + "avro-rs", + "avro-schema", + "chrono", + "decorum", + "derive_more", + "error-stack", + "half", + "itertools 0.11.0", + "num", + "serde", + "static_init", + "tracing", +] + +[[package]] +name = "sparrow-compiler" +version = "0.11.0" +dependencies = [ + "ahash 0.8.3", + "anyhow", + "arrow", + "arrow-schema", + "bit-set", + "chrono", + "clap", + "codespan-reporting", + "const_format", + "decorum", + "derive_more", + "edit-distance", + "egg", + "enum-map", + "error-stack", + "hashbrown 0.14.0", + "itertools 0.11.0", + "lalrpop", + "lalrpop-util", + "logos", + "num", + "once_cell", + "prost", + "prost-types", + "prost-wkt", + "prost-wkt-types", + "serde", + "serde_yaml", + "sha2", + "smallvec", + "sparrow-api", + "sparrow-arrow", + "sparrow-core", + "sparrow-instructions", + "sparrow-kernels", + "sparrow-merge", + "sparrow-syntax", + "static_init", + "strum 0.25.0", + "strum_macros 0.25.1", + "termcolor", + "thiserror", + "tonic", + "tracing", + "uuid 1.4.1", +] + +[[package]] +name = "sparrow-core" +version = "0.11.0" +dependencies = [ + "anyhow", + "arrow", + "chrono", + "decorum", + "futures", + "itertools 0.11.0", + "num", + "owning_ref", + "parquet", + "serde", + "sparrow-arrow", + "static_init", + "tonic", + "tracing", +] + +[[package]] +name = "sparrow-instructions" +version = "0.11.0" +dependencies = [ + "anyhow", + "arrow", + "arrow-schema", + "bincode", + "bit-set", + "bitvec", + "chrono", + "derive_more", + "enum-map", + "erased-serde", + "error-stack", + "hashbrown 0.14.0", + "itertools 0.11.0", + 
"lz4-sys", + "num", + "owning_ref", + "parse-display", + "prost", + "prost-wkt-types", + "rocksdb", + "serde", + "serde_json", + "smallvec", + "sparrow-api", + "sparrow-arrow", + "sparrow-kernels", + "sparrow-syntax", + "static_init", + "strum 0.25.0", + "strum_macros 0.25.1", + "tempfile", + "tonic", + "tracing", + "uuid 1.4.1", +] + +[[package]] +name = "sparrow-kernels" +version = "0.11.0" +dependencies = [ + "anyhow", + "arrow", + "bitvec", + "chrono", + "chronoutil", + "itertools 0.11.0", + "num", + "smallvec", + "sparrow-arrow", + "static_init", + "substring", +] + +[[package]] +name = "sparrow-merge" +version = "0.11.0" +dependencies = [ + "anyhow", + "arrow-arith", + "arrow-array", + "arrow-csv", + "arrow-schema", + "arrow-select", + "async-stream", + "bit-set", + "derive_more", + "error-stack", + "futures", + "itertools 0.11.0", + "proptest", + "smallvec", + "sparrow-arrow", + "sparrow-core", + "tokio", + "tracing", +] + +[[package]] +name = "sparrow-qfr" +version = "0.11.0" +dependencies = [ + "cpu-time", + "derive_more", + "error-stack", + "fallible-iterator", + "futures", + "hashbrown 0.14.0", + "inventory", + "itertools 0.11.0", + "once_cell", + "pin-project", + "prost", + "prost-types", + "tokio", + "tokio-stream", + "tonic-build", + "tracing", +] + +[[package]] +name = "sparrow-runtime" +version = "0.11.0" +dependencies = [ + "ahash 0.8.3", + "anyhow", + "arrow", + "arrow-array", + "arrow-select", + "async-once-cell", + "async-stream", + "async-trait", + "avro-rs", + "avro-schema", + "bit-set", + "bitvec", + "bytes", + "chrono", + "clap", + "dashmap", + "data-encoding", + "derive_more", + "enum-map", + "erased-serde", + "error-stack", + "fallible-iterator", + "futures", + "futures-lite", + "half", + "hashbrown 0.14.0", + "inventory", + "itertools 0.11.0", + "lz4", + "num-traits", + "object_store", + "owning_ref", + "parquet", + "pin-project", + "prost-wkt-types", + "pulsar", + "reqwest", + "serde", + "serde_json", + "serde_yaml", + "sha2", + "smallvec", + "sparrow-api", + "sparrow-arrow", + "sparrow-compiler", + "sparrow-core", + "sparrow-instructions", + "sparrow-kernels", + "sparrow-merge", + "sparrow-qfr", + "sparrow-syntax", + "static_init", + "tempfile", + "tokio", + "tokio-stream", + "tokio-util", + "tonic", + "tracing", + "url", + "uuid 1.4.1", +] + +[[package]] +name = "sparrow-session" +version = "0.11.0" +dependencies = [ + "arrow-array", + "arrow-schema", + "arrow-select", + "derive_more", + "error-stack", + "futures", + "itertools 0.11.0", + "smallvec", + "sparrow-api", + "sparrow-compiler", + "sparrow-instructions", + "sparrow-merge", + "sparrow-runtime", + "sparrow-syntax", + "static_init", + "tokio", + "tokio-stream", + "uuid 1.4.1", +] + +[[package]] +name = "sparrow-syntax" +version = "0.11.0" +dependencies = [ + "anyhow", + "arrow", + "arrow-schema", + "bigdecimal", + "bitvec", + "codespan-reporting", + "decorum", + "hashbrown 0.14.0", + "itertools 0.11.0", + "lalrpop", + "lalrpop-util", + "logos", + "serde", + "smallvec", + "sparrow-arrow", + "static_init", + "thiserror", + "tracing", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "static_init" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a2a1c578e98c1c16fc3b8ec1328f7659a500737d7a0c6d625e73e830ff9c1f6" +dependencies = [ + "bitflags 1.3.2", + "cfg_aliases", + "libc", + "parking_lot 0.11.2", + "parking_lot_core 0.8.6", + "static_init_macro", + "winapi", +] + +[[package]] +name = "static_init_macro" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70a2595fc3aa78f2d0e45dd425b22282dd863273761cc77780914b2cf3003acf" +dependencies = [ + "cfg_aliases", + "memchr", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot 0.12.1", + "phf_shared", + "precomputed-hash", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "structmeta" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ad9e09554f0456d67a69c1584c9798ba733a5b50349a6c0d0948710523922d" +dependencies = [ + "proc-macro2", + "quote", + "structmeta-derive", + "syn 2.0.27", +] + +[[package]] +name = "structmeta-derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a60bcaff7397072dca0017d1db428e30d5002e00b6847703e2e42005c95fbe00" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "strum" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57bd81eb48f4c437cadc685403cad539345bf703d78e63707418431cecd4522b" + +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" + +[[package]] +name = "strum_macros" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87c85aa3f8ea653bfd3ddf25f7ee357ee4d204731f6aa9ad04002306f6e2774c" +dependencies = [ + "heck 0.3.3", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "strum_macros" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6069ca09d878a33f883cc06aaa9718ede171841d3832450354410b718b097232" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.27", +] + +[[package]] +name = "substring" +version = "1.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ee6433ecef213b2e72f587ef64a2f5943e7cd16fbd82dbe8bc07486c534c86" +dependencies = [ + "autocfg", +] + +[[package]] +name = "symbol_table" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32bf088d1d7df2b2b6711b06da3471bc86677383c57b27251e18c56df8deac14" +dependencies = [ + "ahash 0.7.6", + "hashbrown 0.12.3", +] + +[[package]] +name = "symbolic_expressions" +version = "5.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7c68d531d83ec6c531150584c42a4290911964d5f0d79132b193b67252a23b71" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "unicode-xid", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "target-lexicon" +version = "0.12.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d2faeef5759ab89935255b1a4cd98e0baf99d1085e37d36599c625dac49ae8e" + +[[package]] +name = "tempfile" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5486094ee78b2e5038a6382ed7645bc084dc2ec433426ca4c3cb61e2007b8998" +dependencies = [ + "cfg-if", + "fastrand 2.0.0", + "redox_syscall 0.3.5", + "rustix 0.38.4", + "windows-sys", +] + +[[package]] +name = "term" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thiserror" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "thrift" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e54bc85fc7faa8bc175c4bab5b92ba8d9a3ce893d0e9f42cc455c8ab16a9e09" +dependencies = [ + "byteorder", + "integer-encoding", + "ordered-float", +] + +[[package]] +name = "time" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 
0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +dependencies = [ + "autocfg", + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "pin-project-lite", + "socket2", + "tokio-macros", + "windows-sys", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "tonic" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" +dependencies = [ + "async-trait", + "axum", + "base64 0.21.2", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tonic-build" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6fdaae4c2c638bb70fe42803a26fbd6fc6ac8c72f5c59f67ecc2a2dcabf4b07" +dependencies = [ + "prettyplease 0.1.25", + "proc-macro2", + "prost-build", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "tower" 
+version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "tracing-core" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tracing-error" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d686ec1c0f384b1277f097b2f279a2ecc11afe8c133c1aabf036a27cb4cd206e" +dependencies = [ + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +dependencies = [ + "sharded-slab", + "thread_local", + "tracing-core", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "twox-hash" +version = "1.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" +dependencies = [ + "cfg-if", + "static_assertions", +] + +[[package]] +name = "typed-builder" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78cea224ddd4282dfc40d1edabbd0c020a12e946e3a48e2c2b8f6ff167ad29fe" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "typetag" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aec6850cc671cd0cfb3ab285465e48a3b927d9de155051c35797446b32f9169f" +dependencies = [ + "erased-serde", + "inventory", + "once_cell", + "serde", + "typetag-impl", +] + +[[package]] +name = "typetag-impl" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"30c49a6815b4f8379c36f06618bc1b80ca77aaf8a3fd4d8549dca6fdb016000f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", +] + +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-ident" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "unindent" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "url" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom 0.2.10", + "serde", +] + +[[package]] +name = "uuid" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +dependencies = [ + "getrandom 0.2.10", + "rand 0.8.5", +] + +[[package]] +name = "value-bag" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d92ccd67fb88503048c01b59152a04effd0782d035a83a6d256ce6085f08f4a3" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + +[[package]] +name = "waker-fn" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" + +[[package]] +name = "walkdir" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.27", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.27", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" + +[[package]] +name 
= "wasm-streams" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.48.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name 
= "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "zerocopy" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6580539ad917b7c026220c4b3f2c08d52ce54d6ce0dc491e66002e35388fab46" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb" +dependencies = [ + "proc-macro2", + "syn 1.0.109", + "synstructure", +] + +[[package]] +name = "zstd" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a27595e173641171fc74a1232b7b1c7a7cb6e18222c11e9dfb9888fa424c53c" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "6.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee98ffd0b48ee95e6c5168188e44a54550b1564d9d530ee21d5f0eaed1069581" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.8+zstd.1.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c" +dependencies = [ + "cc", + "libc", + "pkg-config", +] diff --git a/python/Cargo.toml b/python/Cargo.toml new file mode 100644 index 000000000..02c175078 --- /dev/null +++ b/python/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "kaskada" +authors = ["Kaskada Developers"] +edition = "2021" +license = "Apache-2.0" +version = "0.6.0-a.1" +description = """ +Python library for building and executing temporal queries. +""" + +# This needs to be its own workspace. 
+[workspace]
+
+[dependencies]
+arrow = { version = "43.0.0", features = ["pyarrow"] }
+derive_more = "0.99.17"
+error-stack = { version = "0.3.1", features = ["anyhow", "spantrace"] }
+futures = "0.3.27"
+itertools = "0.11.0"
+# local_dynamic_tls is necessary (at least on Linux) to avoid
+# "cannot allocate memory in static TLS block"
+mimalloc = { version = "0.1.37", default-features = false, features = ["local_dynamic_tls"] }
+pyo3 = {version = "0.19.1", features = ["abi3-py38", "extension-module", "generate-import-lib"]}
+pyo3-asyncio = { version = "0.19.0", features = ["tokio-runtime"] }
+sparrow-session = { path = "../crates/sparrow-session" }
+tokio = { version = "1.27.0", features = ["sync"] }
+tracing = "0.1.37"
+
+[lib]
+name = "kaskada"
+# `cdylib` is necessary to produce a shared library for Python.
+# This can't be used by downstream Rust code (e.g., as a library).
+crate-type = ["cdylib"]
+
+[profile.release]
+lto = true
+codegen-units = 1
\ No newline at end of file
diff --git a/python/README.md b/python/README.md
new file mode 100644
index 000000000..2ea1da6cb
--- /dev/null
+++ b/python/README.md
@@ -0,0 +1,35 @@
+# Kaskada Timestreams
+
+
+Kaskada's `timestreams` library makes it easy to work with structured event-based data.
+Define temporal queries on event-based data loaded from Python using Pandas or PyArrow, and push new data in as it occurs.
+Or, execute the queries directly on events in your data lake and/or as they arrive on a stream.
+
+With Kaskada you can unleash the value of real-time, temporal queries without the complexity of "big" infrastructure components like a distributed stream or stream-processing system.
+
+Under the hood, `timestreams` is an efficient temporal query engine built in Rust.
+It is built on Apache Arrow, using the same columnar execution strategy that makes ...
+
+
+
+## Install Python
+
+Use `pyenv` and install at least Python `3.8` (most development occurs under `3.11`).
+If multiple versions are installed, `nox` will test against each of them.
+
+## Building and Testing
+
+To build this package, first install `maturin`, then from within a Poetry environment run:
+
+```shell
+poetry shell
+poetry install --no-root
+maturin develop
+pytest
+```
+
+Alternatively, install `nox` and run the tests inside an isolated environment:
+
+```shell
+nox
+```
diff --git a/python/codecov.yml b/python/codecov.yml
new file mode 100644
index 000000000..934af4f8e
--- /dev/null
+++ b/python/codecov.yml
@@ -0,0 +1,9 @@
+comment: false
+coverage:
+  status:
+    project:
+      default:
+        target: "100"
+    patch:
+      default:
+        target: "100"
\ No newline at end of file
diff --git a/python/docs/.gitignore b/python/docs/.gitignore
new file mode 100644
index 000000000..c52066621
--- /dev/null
+++ b/python/docs/.gitignore
@@ -0,0 +1,5 @@
+_build
+.jupyter_cache
+jupyter_execute
+source/reference/apidocs
+source/iframe_figures
\ No newline at end of file
diff --git a/python/docs/source/_extensions/gallery_directive.py b/python/docs/source/_extensions/gallery_directive.py
new file mode 100644
index 000000000..d2a2a5fad
--- /dev/null
+++ b/python/docs/source/_extensions/gallery_directive.py
@@ -0,0 +1,146 @@
+"""A directive to generate a gallery of images from structured data.
+
+Generating a gallery of images that are all the same size is a common
+pattern in documentation, and this can be cumbersome if the gallery is
+generated programmatically. This directive wraps this particular use-case
+in a helper-directive to generate it with a single YAML configuration file.
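+
+For example, the grid items can be given inline as a YAML list (an
+illustrative sketch; any keys besides "title", "header", "image", and
+"content" are forwarded as options to the generated grid-item-card,
+e.g. "link"):
+
+    - title: Quickstart
+      content: A short description shown on the card.
+      link: ../quickstart.html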
+ +It currently exists for maintainers of the pydata-sphinx-theme, +but might be abstracted into a standalone package if it proves useful. +""" +from pathlib import Path +from typing import Any +from typing import Dict +from typing import List + +from docutils import nodes +from docutils.parsers.rst import directives +from sphinx.application import Sphinx +from sphinx.util import logging +from sphinx.util.docutils import SphinxDirective +from yaml import safe_load + + +logger = logging.getLogger(__name__) + + +TEMPLATE_GRID = """ +`````{{grid}} {columns} +{options} + +{content} + +````` +""" + +GRID_CARD = """ +````{{grid-item-card}} {title} +{options} + +{content} +```` +""" + + +class GalleryGridDirective(SphinxDirective): + """A directive to show a gallery of images and links in a Bootstrap grid. + + The grid can be generated from a YAML file that contains a list of items, or + from the content of the directive (also formatted in YAML). Use the parameter + "class-card" to add an additional CSS class to all cards. When specifying the grid + items, you can use all parameters from "grid-item-card" directive to customize + individual cards + ["image", "header", "content", "title"]. + + Danger: + This directive can only be used in the context of a Myst documentation page as + the templates use Markdown flavored formatting. + """ + + name = "gallery-grid" + has_content = True + required_arguments = 0 + optional_arguments = 1 + final_argument_whitespace = True + option_spec = { + # A class to be added to the resulting container + "grid-columns": directives.unchanged, + "class-container": directives.unchanged, + "class-card": directives.unchanged, + } + + def run(self) -> List[nodes.Node]: + """Create the gallery grid.""" + if self.arguments: + # If an argument is given, assume it's a path to a YAML file + # Parse it and load it into the directive content + path_data_rel = Path(self.arguments[0]) + path_doc, _ = self.get_source_info() + path_doc = Path(path_doc).parent + path_data = (path_doc / path_data_rel).resolve() + if not path_data.exists(): + logger.warn(f"Could not find grid data at {path_data}.") + nodes.text("No grid data found at {path_data}.") + return + yaml_string = path_data.read_text() + else: + yaml_string = "\n".join(self.content) + + # Use all the element with an img-bottom key as sites to show + # and generate a card item for each of them + grid_items = [] + for item in safe_load(yaml_string): + # remove parameters that are not needed for the card options + title = item.pop("title", "") + + # build the content of the card using some extra parameters + header = f"{item.pop('header')} \n^^^ \n" if "header" in item else "" + image = f"![image]({item.pop('image')}) \n" if "image" in item else "" + content = f"{item.pop('content')} \n" if "content" in item else "" + + # optional parameter that influence all cards + if "class-card" in self.options: + item["class-card"] = self.options["class-card"] + + loc_options_str = "\n".join(f":{k}: {v}" for k, v in item.items()) + " \n" + + card = GRID_CARD.format( + options=loc_options_str, content=header + image + content, title=title + ) + grid_items.append(card) + + # Parse the template with Sphinx Design to create an output container + # Prep the options for the template grid + class_ = "gallery-directive" + f' {self.options.get("class-container", "")}' + options = {"gutter": 2, "class-container": class_} + options_str = "\n".join(f":{k}: {v}" for k, v in options.items()) + + # Create the directive string for the grid + grid_directive = 
TEMPLATE_GRID.format( + columns=self.options.get("grid-columns", "1 2 3 4"), + options=options_str, + content="\n".join(grid_items), + ) + + # Parse content as a directive so Sphinx Design processes it + container = nodes.container() + self.state.nested_parse([grid_directive], 0, container) + + # Sphinx Design outputs a container too, so just use that + return [container.children[0]] + + +def setup(app: Sphinx) -> Dict[str, Any]: + """Add custom configuration to sphinx app. + + Args: + app: the Sphinx application + + Returns: + the 2 parallel parameters set to ``True``. + """ + app.add_directive("gallery-grid", GalleryGridDirective) + + return { + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/python/docs/source/_static/favicon.png b/python/docs/source/_static/favicon.png new file mode 100644 index 000000000..2983af6ae Binary files /dev/null and b/python/docs/source/_static/favicon.png differ diff --git a/python/docs/source/_static/kaskada-negative.svg b/python/docs/source/_static/kaskada-negative.svg new file mode 100644 index 000000000..6b78c25fc --- /dev/null +++ b/python/docs/source/_static/kaskada-negative.svg @@ -0,0 +1 @@ +Kaskada Logo Horizontal Negative RGB \ No newline at end of file diff --git a/python/docs/source/_static/kaskada-positive.svg b/python/docs/source/_static/kaskada-positive.svg new file mode 100644 index 000000000..ccfae84e2 --- /dev/null +++ b/python/docs/source/_static/kaskada-positive.svg @@ -0,0 +1 @@ +Kaskada Logo Horizontal Positive RGB \ No newline at end of file diff --git a/python/docs/source/_templates/autosummary/class.rst b/python/docs/source/_templates/autosummary/class.rst new file mode 100644 index 000000000..bc23de59b --- /dev/null +++ b/python/docs/source/_templates/autosummary/class.rst @@ -0,0 +1,10 @@ +:html_theme.sidebar_secondary.remove: + +{{ objname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. auto{{ objtype }}:: {{ objname }} + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/python/docs/source/_templates/autosummary/function.rst b/python/docs/source/_templates/autosummary/function.rst new file mode 100644 index 000000000..5a51f80a5 --- /dev/null +++ b/python/docs/source/_templates/autosummary/function.rst @@ -0,0 +1,7 @@ +:html_theme.sidebar_secondary.remove: + +{{ objname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. auto{{ objtype }}:: {{ objname }} \ No newline at end of file diff --git a/python/docs/source/_templates/autosummary/method.rst b/python/docs/source/_templates/autosummary/method.rst new file mode 100644 index 000000000..5a51f80a5 --- /dev/null +++ b/python/docs/source/_templates/autosummary/method.rst @@ -0,0 +1,7 @@ +:html_theme.sidebar_secondary.remove: + +{{ objname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. 
diff --git a/python/docs/source/conf.py b/python/docs/source/conf.py
new file mode 100644
index 000000000..75b9e47b7
--- /dev/null
+++ b/python/docs/source/conf.py
@@ -0,0 +1,121 @@
+"""Sphinx configuration."""
+import sys
+from pathlib import Path
+from typing import Any
+from typing import Dict
+
+
+sys.path.append(str(Path(".").resolve()))
+
+project = "kaskada"
+author = "Kaskada Contributors"
+copyright = "2023, Kaskada Contributors"
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.todo",
+    "sphinx_design",
+    # "myst_parser",
+    "myst_nb",
+    "sphinx_copybutton",
+    "_extensions.gallery_directive",
+]
+language = "en"
+
+html_theme = "sphinx_book_theme"
+html_favicon = "_static/favicon.png"
+html_title = "Kaskada"
+html_js_files = [
+    "https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.4/require.min.js"
+]
+
+html_theme_options: Dict[str, Any] = {
+    "repository_url": "https://github.com/kaskada-ai/kaskada",
+    "use_repository_button": True,
+    "use_source_button": True,
+    "use_edit_page_button": True,
+    "home_page_in_toc": False,
+    "use_issues_button": True,
+    "repository_branch": "main",
+    "path_to_docs": "python/docs/source",
+    "announcement": (
+        "This describes the next version of Kaskada. "
+        "It is currently available as an alpha release."
+    ),
+    "icon_links": [
+        {
+            "name": "GitHub",
+            "url": "https://github.com/kaskada-ai/kaskada",  # required
+            "icon": "fa-brands fa-square-github",
+            "type": "fontawesome",
+        },
+        {
+            "name": "Slack",
+            "url": "https://join.slack.com/t/kaskada-hq/shared_invite/zt-1t1lms085-bqs2jtGO2TYr9kuuam~c9w",
+            "icon": "fa-brands fa-slack",
+        },
+    ],
+    "logo": {
+        "image_light": "_static/kaskada-positive.svg",
+        "image_dark": "_static/kaskada-negative.svg",
+    },
+    "primary_sidebar_end": ["indices.html"],
+    "show_toc_level": 2,
+    "show_nav_level": 2,
+}
+
+templates_path = ["_templates"]
+html_static_path = ["_static"]
+
+html_context = {
+    "github_user": "kaskada-ai",
+    "github_repo": "kaskada",
+    "github_version": "main",
+    "doc_path": "python/docs/source",
+    "analytics": {
+        "google_analytics_id": "G-HR9E2E6TG4",
+    },
+}
+
+intersphinx_mapping: Dict[str, Any] = {
+    "python": ("http://docs.python.org/3", None),
+    "pandas": ("http://pandas.pydata.org/docs", None),
+    "pyarrow": ("https://arrow.apache.org/docs", None),
+}
+
+# adds useful copy functionality to all the examples; also
+# strips the '>>>' and '...' prompt/continuation prefixes.
+copybutton_prompt_text = r">>> |\.\.\. "
+copybutton_prompt_is_regexp = True
+
+# Options for Todos
+todo_include_todos = True
+
+# Options for Myst (markdown)
+# https://myst-parser.readthedocs.io/en/v0.17.1/syntax/optional.html
+myst_enable_extensions = [
+    "colon_fence",
+    "deflist",
+    "smartquotes",
+    "replacements",
+]
+myst_heading_anchors = 3
+
+# -- Options for autodoc ----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#configuration
+
+# Automatically extract typehints when specified and place them in
+# descriptions of the relevant function/method.
+autodoc_typehints = "description"
+
+# Don't show class signature with the class' name.
+autodoc_class_signature = "separated" + +autosummary_generate = True + +napoleon_preprocess_types = True + +suppress_warnings = ["mystnb.unknown_mime_type"] diff --git a/python/docs/source/examples/index.md b/python/docs/source/examples/index.md new file mode 100644 index 000000000..5201d6fc3 --- /dev/null +++ b/python/docs/source/examples/index.md @@ -0,0 +1,9 @@ +# Examples + + +```{toctree} +:hidden: +:maxdepth: 2 + +time_centric +``` \ No newline at end of file diff --git a/python/docs/source/examples/time_centric.ipynb b/python/docs/source/examples/time_centric.ipynb new file mode 100644 index 000000000..0b77d64dc --- /dev/null +++ b/python/docs/source/examples/time_centric.ipynb @@ -0,0 +1,343 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "id": "5a20a51f", + "metadata": { + "id": "5a20a51f" + }, + "source": [ + "# Time-centric Calculations\n", + "\n", + "Kaskada was built to process and perform temporal calculations on event streams,\n", + "with real-time analytics and machine learning in mind. It is not exclusively for\n", + "real-time applications, but Kaskada excels at time-centric computations and\n", + "aggregations on event-based data.\n", + "\n", + "For example, let's say you're building a user analytics dashboard at an\n", + "ecommerce retailer. You have event streams showing all actions the user has\n", + "taken, and you'd like to include in the dashboard:\n", + "* the total number of events the user has ever generated\n", + "* the total number of purchases the user has made\n", + "* the total revenue from the user\n", + "* the number of purchases made by the user today\n", + "* the total revenue from the user today\n", + "* the number of events the user has generated in the past hour\n", + "\n", + "Because the calculations needed here are a mix of hourly, daily, and over all of\n", + "history, more than one type of event aggregation needs to happen. Table-centric\n", + "tools like those based on SQL would require multiple JOINs and window functions,\n", + "which would be spread over multiple queries or CTEs. \n", + "\n", + "Kaskada was designed for these types of time-centric calculations, so we can do\n", + "each of the calculations in the list in one line:\n", + "\n", + "```python\n", + "record({\n", + " \"event_count_total\": DemoEvents.count(),\n", + " \"purchases_total_count\": DemoEvents.filter(DemoEvents.col(\"event_name\").eq(\"purchase\")).count(),\n", + " \"revenue_total\": DemoEvents.col(\"revenue\").sum(),\n", + " \"purchases_daily\": DemoEvents.filter(DemoEvents.col(\"event_name\").eq(\"purchase\")).count(window=Daily()),\n", + " \"revenue_daily\": DemoEvents.col(\"revenue\").sum(window=Daily()),\n", + " \"event_count_hourly\": DemoEvents.count(window=Hourly()),\n", + "})\n", + "```\n", + "\n", + "```{warning}\n", + "The previous example demonstrates the use of `Daily()` and `Hourly()` windowing which aren't yet part of the new Python library.\n", + "```\n", + "\n", + "Of course, a few more lines of code are needed to put these calculations to work,\n", + "but these six lines are all that is needed to specify the calculations\n", + "themselves. Each line may specify:\n", + "* the name of a calculation (e.g. `event_count_total`)\n", + "* the input data to start with (e.g. `DemoEvents`)\n", + "* selecting event fields (e.g. `DemoEvents.col(\"revenue\")`)\n", + "* function calls (e.g. `count()`)\n", + "* event filtering (e.g. `filter(DemoEvents.col(\"event_name\").eq(\"purchase\"))`)\n", + "* time windows to calculate over (e.g. 
`window=Daily()`)\n",
+    "\n",
+    "...with consecutive steps chained together in a familiar way.\n",
+    "\n",
+    "Because Kaskada was built for time-centric calculations on event-based data, a\n",
+    "calculation we might describe as \"total number of purchase events for the user\"\n",
+    "can be defined in Kaskada in roughly the same number of terms as the verbal\n",
+    "description itself.\n",
+    "\n",
+    "Continue through the demo below to find out how it works.\n",
+    "\n",
+    "See [the Kaskada documentation](../guide/index) for lots more information."
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "BJ2EE9mSGtGB",
+   "metadata": {
+    "id": "BJ2EE9mSGtGB"
+   },
+   "source": [
+    "## Kaskada Client Setup\n",
+    "\n",
+    "```\n",
+    "%pip install kaskada>=0.6.0-a.1\n",
+    "```"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "37db47ba",
+   "metadata": {
+    "tags": [
+     "hide-output"
+    ]
+   },
+   "outputs": [],
+   "source": [
+    "import kaskada as kd\n",
+    "kd.init_session()"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "5b838eef",
+   "metadata": {},
+   "source": [
+    "## Example dataset\n",
+    "\n",
+    "For this demo, we'll use a very small example data set which, for simplicity and portability of this notebook, we'll read from a string.\n",
+    "\n",
+    "```{note}\n",
+    "For simplicity, instead of a CSV file or other file format, we read and then parse data from a CSV string.\n",
+    "You can load your own event data from many common sources, including Pandas DataFrames and Parquet files.\n",
+    "See {py:mod}`kaskada.sources` for more information on the available sources.\n",
+    "```"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ba4bb6b6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# For demo simplicity, instead of a CSV file, we read and then parse data from a\n",
+    "# CSV string.\n",
+    "event_data_string = '''\n",
+    "    event_id,event_at,entity_id,event_name,revenue\n",
+    "    ev_00001,2022-01-01 22:01:00,user_001,login,0\n",
+    "    ev_00002,2022-01-01 22:05:00,user_001,view_item,0\n",
+    "    ev_00003,2022-01-01 22:20:00,user_001,view_item,0\n",
+    "    ev_00004,2022-01-01 23:10:00,user_001,view_item,0\n",
+    "    ev_00005,2022-01-01 23:20:00,user_001,view_item,0\n",
+    "    ev_00006,2022-01-01 23:40:00,user_001,purchase,12.50\n",
+    "    ev_00007,2022-01-01 23:45:00,user_001,view_item,0\n",
+    "    ev_00008,2022-01-01 23:59:00,user_001,view_item,0\n",
+    "    ev_00009,2022-01-02 05:30:00,user_001,login,0\n",
+    "    ev_00010,2022-01-02 05:35:00,user_001,view_item,0\n",
+    "    ev_00011,2022-01-02 05:45:00,user_001,view_item,0\n",
+    "    ev_00012,2022-01-02 06:10:00,user_001,view_item,0\n",
+    "    ev_00013,2022-01-02 06:15:00,user_001,view_item,0\n",
+    "    ev_00014,2022-01-02 06:25:00,user_001,purchase,25\n",
+    "    ev_00015,2022-01-02 06:30:00,user_001,view_item,0\n",
+    "    ev_00016,2022-01-02 06:31:00,user_001,purchase,5.75\n",
+    "    ev_00017,2022-01-02 07:01:00,user_001,view_item,0\n",
+    "    ev_00018,2022-01-01 22:17:00,user_002,view_item,0\n",
+    "    ev_00019,2022-01-01 22:18:00,user_002,view_item,0\n",
+    "    ev_00020,2022-01-01 22:20:00,user_002,view_item,0\n",
+    "'''\n",
+    "\n",
+    "events = kd.sources.CsvString(event_data_string,\n",
+    "                              time_column_name='event_at',\n",
+    "                              key_column_name='entity_id')\n",
+    "\n",
+    "# Inspect the event data\n",
+    "events.preview()"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "568d1272",
+   "metadata": {},
+   "source": [
+    "## Define queries and calculations"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "c2c5a298",
+   "metadata": {},
+   "source": [
+    "Kaskada queries are written in Python, by chaining and combining\n",
+    "operations on Timestreams like those shown above.\n",
+    "\n",
+    "See [the Timestream\n",
+    "reference](../reference/timestream/index.md)\n",
+    "for more information.\n",
+    "\n",
+    "Let's do a simple query for events for a specific entity ID.\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bce22e47",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "events.filter(events.col(\"entity_id\").eq(\"user_002\")).preview()"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "6b5f2725",
+   "metadata": {},
+   "source": [
+    "\n",
+    "Beyond querying for events, Kaskada has a powerful syntax for defining\n",
+    "calculations on events, temporally across history.\n",
+    "\n",
+    "The six calculations discussed at the top of this demo notebook are below.\n",
+    "\n",
+    "(Note that some functions return `NaN` if no events for that user have occurred\n",
+    "within the time window.)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "3ad6d596",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "purchases = events.filter(events.col(\"event_name\").eq(\"purchase\"))\n",
+    "\n",
+    "features = kd.record({\n",
+    "    \"event_count_total\": events.count(),\n",
+    "    #\"event_count_hourly\": events.count(window=Hourly()),\n",
+    "    \"purchases_total_count\": purchases.count(),\n",
+    "    #\"purchases_today\": purchases.count(window=Since(Daily())),\n",
+    "    #\"revenue_today\": events.col(\"revenue\").sum(window=Since(Daily())),\n",
+    "    \"revenue_total\": events.col(\"revenue\").sum(),\n",
+    "})\n",
+    "features.preview()"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "id": "1c315938",
+   "metadata": {},
+   "source": [
+    "## At Any Time\n",
+    "\n",
+    "A key feature of Kaskada's time-centric design is the ability to query for\n",
+    "calculation values at any point in time. Traditional query languages (e.g. SQL)\n",
+    "can only return data that already exists---if we want to return a row of\n",
+    "computed/aggregated data, we have to compute the row first, then return it. As a\n",
+    "specific example, suppose we have SQL queries that produce daily aggregations\n",
+    "over event data, and now we want to have the same aggregations on an hourly\n",
+    "basis. In SQL, we would need to write new queries for hourly aggregations; the\n",
+    "queries would look very similar to the daily ones, but they would still be\n",
+    "different queries.\n",
+    "\n",
+    "With Kaskada, we can define the calculations once, and then specify the points\n",
+    "in time at which we want to know the calculation values when we query them.\n",
+    "\n",
+    "In the examples so far, we have used `preview()` to get a DataFrame containing\n",
+    "some of the rows from the Timestreams we've defined. By default, this produces\n",
+    "a _history_ containing all the times the result changed. This is useful for\n",
+    "using past values to create training examples.\n",
+    "\n",
+    "We can also execute the query for the values at a specific point in time."
+ ] + }, + { + "cell_type": "markdown", + "id": "082e174d", + "metadata": { + "tags": [ + "hide-output" + ] + }, + "source": [ + "```\n", + "features.preview(at=\"2022-01-01 22:00\")\n", + "``````" + ] + }, + { + "cell_type": "markdown", + "id": "5a44c5f7", + "metadata": {}, + "source": [ + "You can also compose a query that produces values at specific points in time.\n", + "\n", + "```\n", + "features.when(hourly())\n", + "```\n", + "\n", + "Regardless of the time cadence of the calculations themselves, the query output\n", + "can contain rows for whatever time points you specify. You can define a set of\n", + "daily calculations and then get hourly updates during the day. Or, you can\n", + "publish the definitions of some features in a Python module and different users\n", + "can query those same calculations for hourly, daily, and monthly\n", + "values---without editing the calculation definitions themselves.\n", + "\n", + "## Adding more calculations to the query\n", + "\n", + "We can add two new calculations, also in one line each, representing:\n", + "* the time of the user's first event\n", + "* the time of the user's last event\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "2ba09e77-0fdf-43f4-960b-50a126262ec7", + "metadata": { + "id": "2ba09e77-0fdf-43f4-960b-50a126262ec7" + }, + "source": [ + "This is only a small sample of possible Kaskada queries and capabilities. See\n", + "everything that's possible with [Timestreams](../reference/timestream/index.md)." + ] + } + ], + "metadata": { + "colab": { + "collapsed_sections": [ + "6924ca3e-28b3-4f93-b0cf-5f8afddc11d8", + "936700a9-e042-401c-9156-7bb18652e109", + "08f5921d-36dc-41d1-a2a6-ae800b7a11de" + ], + "private_outputs": true, + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/python/docs/source/guide/aggregation.md b/python/docs/source/guide/aggregation.md new file mode 100644 index 000000000..79eba855a --- /dev/null +++ b/python/docs/source/guide/aggregation.md @@ -0,0 +1,3 @@ +# Aggregation + +## Windowing \ No newline at end of file diff --git a/python/docs/source/guide/data_types.md b/python/docs/source/guide/data_types.md new file mode 100644 index 000000000..bb60a7dd0 --- /dev/null +++ b/python/docs/source/guide/data_types.md @@ -0,0 +1,108 @@ +# Data Types + +Kaskada operates on typed Timestreams. +Similar to how every Pandas `DataFrame` has an associated `dtype`, every Kaskada `Timestream` has an associated type. +The set of supported types is based on the types supported by [Apache Arrow](https://arrow.apache.org/). + +Each `Timestream` contains points of the corresponding type. +We'll often say that the "type" of a `Timestream` is the type of the values it contains. + +Kaskada's type system describes several kinds of values. +Scalar types correspond to simple values, such as the string `"hello"` or the integer `57`. +They correspond to a stream containing values of the given type, or `null`. +Composite types are created from other types. +For instance, records may be created using scalar and other composite types as fields. 
+An expression producing a record type is a stream that produces a value of the given record type or `null`.
+
+## Scalar Types
+
+Scalar types include booleans, numbers, strings, timestamps, durations and calendar intervals.
+
+:::{list-table} Scalar Types
+:widths: 1, 3
+:header-rows: 1
+
+- * Types
+  * Description
+- * `bool`
+  * Booleans represent true or false.
+
+    Examples: `true`, `false`.
+- * `u8`, `u16`, `u32`, `u64`
+  * Unsigned integer numbers of the specified bit width.
+
+    Examples: `0`, `1`, `1000`
+- * `i8`, `i16`, `i32`, `i64`
+  * Signed integer numbers of the specified bit width.
+
+    Examples: `0`, `1`, `-100`
+- * `f32`, `f64`
+  * Floating point numbers of the specified bit width.
+
+    Examples: `0`, `1`, `-100`, `1000`, `0.0`, `-1.0`, `-100837.631`.
+- * `str`
+  * Unicode strings.
+
+    Examples: `"hello", "hi 'bob'"`.
+
+- * `timestamp_s`, `timestamp_ms`, `timestamp_us`, `timestamp_ns`
+  * Points in time relative to the Unix Epoch (00:00:00 UTC on January 1, 1970).
+    Time unit may be seconds (s), milliseconds (ms), microseconds (us) or nanoseconds (ns).
+
+    Examples: `1639595174 as timestamp_s`
+- * `duration_s`, `duration_ms`, `duration_us`, `duration_ns`
+  * A duration of a fixed amount of a specific time unit.
+    Time unit may be seconds (s), milliseconds (ms), microseconds (us) or nanoseconds (ns).
+
+    Examples: `-100 as duration_ms`
+- * `interval_days`, `interval_months`
+  * A calendar interval corresponding to the given amount of the corresponding time.
+    The length of an interval depends on the point in time it is added to.
+    For instance, adding 1 `interval_month` to a timestamp will shift to the same day of the next month.
+
+    Examples: `1 as interval_days`, `-100 as interval_months`
+:::
+
+## Record Types
+
+Records allow combining 1 or more values of potentially different types into a single value.
+Records are unnamed: any two records with the same set of field names and value types are considered equal.
+Fields within a record may have different types.
+Field names must start with a letter.
+
+For example, `{name: str, age: u32}` is a record type with two fields, and `{name: 'Ben', age: 33}` is a corresponding value.
+
+NOTE: Record types may be nested.
+
+## Type Coercion
+Kaskada implicitly coerces numeric types when different kinds of numbers are combined.
+For example, adding a 64-bit signed integer value to a 32-bit floating point value produces a 64-bit floating point value.
+
+Type coercion will never produce an integer overflow or reduction in numeric precision.
+If needed, such conversions must be explicitly specified using `as`.
+
+The coercion rules can be summarized as follows:
+
+1. Unsigned integers can be widened: `u8` ⇨ `u16` ⇨ `u32` ⇨ `u64`.
+2. Integers can be widened: `i8` ⇨ `i16` ⇨ `i32` ⇨ `i64`.
+3. Floating point numbers can be widened: `f16` ⇨ `f32` ⇨ `f64`.
+4. Unsigned integers can be promoted to the next wider integer `u8` ⇨ `i16`, `u16` ⇨ `i32`, `u32` ⇨ `i64`.
+5. All numbers may be converted to `f64`.
+6. Strings may be implicitly converted to timestamps by attempting to parse them as RFC3339 values.
+The timestamp will be null for strings that don't successfully parse.
+
+One aspect of the coercion rules is that when an operation is applied to two different numeric types, the result may be a third type to which both may be coerced.
+For instance, adding an `i32` to a `u32` produces an `i64`, as the sketch below illustrates.
+The full type promotion table for binary operations over two different numeric types follows.
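+
+A quick way to see coercion in action is to inspect the result type of a combined Timestream.
+A minimal sketch, assuming the CSV columns below parse as `i64` and `f64` respectively:
+
+```python
+import kaskada as kd
+
+kd.init_session()
+content = "\n".join(
+    [
+        "time,key,i,f",
+        "1996-12-19T16:39:57,A,5,1.5",
+    ]
+)
+source = kd.sources.CsvString(content, time_column_name="time", key_column_name="key")
+
+# Combining an integer column with a floating point column coerces the
+# result to a common type (here, f64).
+print((source.col("i") + source.col("f")).data_type)
+```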
+ +| | `u8` | `u16` | `u32` | `u64` | `i8` | `i16` | `i32` | `i64` | `f16` | `f32` | `f64` | +| --------- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | +| **`u8`** | `u8` | `u16` | `u32` | `u64` | `i16` | `i16` | `i32` | `i64` | `f16` | `f32` | `f64` | +| **`u16`** | `u16` | `u16` | `u32` | `u64` | `i32` | `i32` | `i32` | `i64` | `f16` | `f32` | `f64` | +| **`u32`** | `u32` | `u32` | `u32` | `u64` | `i64` | `i64` | `i64` | `i64` | `f32` | `f32` | `f64` | +| **`u64`** | `u64` | `u64` | `u64` | `u64` | `f64` | `f64` | `f64` | `f64` | `f64` | `f64` | `f64` | +| **`i8`** | `i16` | `i32` | `i64` | `f64` | `i8` | `i16` | `i32` | `i64` | `f16` | `f32` | `f64` | +| **`i16`** | `i16` | `i32` | `i64` | `f64` | `i16` | `i16` | `i32` | `i64` | `f16` | `f32` | `f64` | +| **`i32`** | `i32` | `i32` | `i64` | `f64` | `i32` | `i32` | `i32` | `i64` | `f16` | `f32` | `f64` | +| **`i64`** | `i64` | `i64` | `i64` | `f64` | `i64` | `i64` | `i64` | `i64` | `f16` | `f32` | `f64` | +| **`f16`** | `f16` | `f16` | `f16` | `f16` | `f16` | `f16` | `f16` | `f16` | `f16` | `f32` | `f64` | +| **`f32`** | `f32` | `f32` | `f32` | `f32` | `f32` | `f32` | `f32` | `f32` | `f32` | `f32` | `f64` | +| **`f64`** | `f64` | `f64` | `f64` | `f64` | `f64` | `f64` | `f64` | `f64` | `f64` | `f64` | `f64` | \ No newline at end of file diff --git a/python/docs/source/guide/entities.md b/python/docs/source/guide/entities.md new file mode 100644 index 000000000..79f4a9cf9 --- /dev/null +++ b/python/docs/source/guide/entities.md @@ -0,0 +1,61 @@ +# Entities and Grouping + +Entities organize data for use in feature engineering. +They describe the particular objects that a prediction will be made for. +The result of a feature computation is a _feature vector_ for each entity at various points in time. + +## What is an Entity? +Entities represent the categories or "nouns" associated with the data. +They can generally be thought of as any category of object related to the events being processed. +For example, when manipulating purchase events, there may be entities for the customers, vendors and items being purchased. +Each purchase event may be related to a customer, a vendor, and one or more items. + +If something can be given a name or other unique identifier, it can likely be used as an entity. +In a relational database, an entity would be anything that is identified by the same key in a set of tables. + +## What is an Entity Key? +An entity kind is a category of objects, for example customer or vendor. +An entity key identifies a unique instance of that category -- a `customer_id` or a `vendor_id`. + +One may think of an entity as a table containing instances -- or rows -- of that type of entity. +The entity key would be the primary key of that table. + +The following table shows some example entities and possible keys. +Many of the example instances may not be suitable for use as the entity key, for the same reason you wouldn't use them as a primary key. +For example, using `Vancouver` to identify cities would lead to ambiguity between Vancouver in British Columbia and Vancouver in Washington State. +In these cases, you'd likely use some other identifier for instances. +Others may be useful, such as using the airport code. + +:::{list-table} Example Entities and corresponding keys. 
+:header-rows: 1
+
+* - Example Entity
+  - Example Entity Instance
+* - Houses
+  - 1600 Pennsylvania Avenue
+* - Airports
+  - SEA
+* - Customers
+  - John Doe
+* - City
+  - Vancouver
+* - State
+  - Washington
+:::
+
+## Entities and Aggregation
+
+Many, if not all, Kaskada queries involve aggregating events to produce values.
+Entities provide an implicit grouping for the aggregation.
+When we write `sum(Purchases.amount)`, it is an aggregation that returns the sum of purchases made _by each entity_.
+This is helpful since the _feature vector_ for an entity will depend only on events related to that entity.
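+
+As an illustrative sketch of this implicit grouping (hypothetical data; the entity key is the `user` column):
+
+```python
+import kaskada as kd
+
+kd.init_session()
+purchases = kd.sources.CsvString(
+    "\n".join(
+        [
+            "time,user,amount",
+            "2023-01-01T00:00:00,alice,10",
+            "2023-01-02T00:00:00,bob,5",
+            "2023-01-03T00:00:00,alice,3",
+        ]
+    ),
+    time_column_name="time",
+    key_column_name="user",
+)
+
+# The sum is grouped by entity: alice's sum is 10 and then 13, bob's is 5.
+purchases.col("amount").sum().preview()
+```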
+
+## Joining
+
+Joining with the same entity happens automatically.
+Joining with other entities (and even other kinds of entities) is done using `lookup`.
+See [Joins](joins.md) for more information.
\ No newline at end of file
diff --git a/python/docs/source/guide/index.md b/python/docs/source/guide/index.md
new file mode 100644
index 000000000..bbccb4e5c
--- /dev/null
+++ b/python/docs/source/guide/index.md
@@ -0,0 +1,47 @@
+# User Guide
+
+Understanding and reacting to the world in real-time requires understanding what is happening _now_ in the context of what happened in the past.
+You need the ability to understand if what just happened is unusual, how it relates to what happened previously, and how it relates to other things that are happening at the same time.
+
+Kaskada processes events from streams and historic data sources to answer these questions in real-time.
+
+The power and convenience of Kaskada comes from a new abstraction: the Timestream.
+Timestreams provide a declarative, dataframe-like API over the complete temporal context.
+Easily combine multiple streams and reason about the complete sequence of events.
+Use time-travel to compute training examples from historic data and understand how results change over time.
+
+## What are "Timestreams"?
+
+A [Timestream](timestreams) describes how a value changes over time.
+In the same way that SQL queries transform tables and graph queries transform nodes and edges, Kaskada queries transform Timestreams.
+
+In comparison to a timeseries, which often contains simple values (e.g., numeric observations) defined at fixed, periodic times (i.e., every minute), a Timestream contains any kind of data (records or collections as well as primitives) and may be defined at arbitrary times corresponding to when the events occur.
+
+## Getting Started with Timestreams
+
+Getting started with Timestreams is as simple as `pip` installing the Python library, loading some data and running a query.
+
+```python
+import timestreams as t
+
+# Read data from a Parquet file.
+data = t.sources.Parquet.from_file(
+    "path_to_file.parquet",
+    time = "time",
+    key = "user")
+# Get the count of events associated with each user over time, as a dataframe.
+data.count().run().to_pandas()
+```
+
+```{toctree}
+:hidden:
+:maxdepth: 2
+
+installation
+timestreams
+data_types
+entities
+aggregation
+joins
+sources
+```
\ No newline at end of file
diff --git a/python/docs/source/guide/installation.md b/python/docs/source/guide/installation.md
new file mode 100644
index 000000000..5882f92f2
--- /dev/null
+++ b/python/docs/source/guide/installation.md
@@ -0,0 +1,25 @@
+# Installation
+
+To install Kaskada, you need to be using Python >= 3.8.
+We suggest using 3.11 or newer, since that provides more precise error locations.
+
+```{code-block} bash
+:caption: Installing Kaskada
+pip install kaskada>=0.6.0-a.0
+```
+
+```{warning}
+This version of Kaskada is currently a pre-release, as indicated by the `-a.0` suffix.
+It will not be installed by default if you `pip install kaskada`.
+You need to either use `pip install --pre kaskada` or specify a specific version, as shown in the example.
+```
+
+```{admonition} Pip and pip3 and permissions
+:class: tip
+
+Depending on your Python installation and configuration, you may have `pip3` instead of `pip` available in your terminal.
+If you have `pip3`, replace `pip` with `pip3` in your command, i.e., `pip3 install kaskada`.
+
+If you get a permission error when running the `pip` command, you may need to run as an administrator using `sudo pip install kaskada`.
+If you don't have administrator access (e.g., in Google Colab, or other hosted environments) you may use `pip`'s `--user` flag to install the package in your user directory.
+```
diff --git a/python/docs/source/guide/joins.md b/python/docs/source/guide/joins.md
new file mode 100644
index 000000000..ed46ab1c9
--- /dev/null
+++ b/python/docs/source/guide/joins.md
@@ -0,0 +1,18 @@
+# Joins
+
+
+## Domains and Implicit Joins
+
+It is sometimes useful to consider the _domain_ of an expression.
+This corresponds to the points in time and the entities associated with the expression's values.
+For discrete timestreams, this corresponds to the points at which those values occur.
+For continuous timestreams, this corresponds to the points at which the value changes.
+
+Whenever expressions with two (or more) different domains are used in the same expression they are implicitly joined.
+The join is an outer join that contains an event if either (any) of the input domains contained an event.
+For any input table that is continuous, the join is `as of` the time of the output, taking the latest value from that input.
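+
+As a rough sketch, assuming `purchases` and `pageviews` are hypothetical Timestreams keyed by the same entity:
+
+```python
+# Each aggregation is continuous, so using both in one expression
+# implicitly joins their domains; at every output point, each input
+# contributes its latest value "as of" that time.
+spend_per_view = purchases.col("amount").sum() / pageviews.count()
+```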
+
+
+## Implicit Joins
+
+## Explicit Lookups
\ No newline at end of file
diff --git a/python/docs/source/guide/sources.md b/python/docs/source/guide/sources.md
new file mode 100644
index 000000000..24cf12ed2
--- /dev/null
+++ b/python/docs/source/guide/sources.md
@@ -0,0 +1 @@
+# Sources
\ No newline at end of file
diff --git a/python/docs/source/guide/timestreams.md b/python/docs/source/guide/timestreams.md
new file mode 100644
index 000000000..e11a86a64
--- /dev/null
+++ b/python/docs/source/guide/timestreams.md
@@ -0,0 +1,3 @@
+# Timestreams
+
+## Continuity
\ No newline at end of file
diff --git a/python/docs/source/index.md b/python/docs/source/index.md
new file mode 100644
index 000000000..32166f490
--- /dev/null
+++ b/python/docs/source/index.md
@@ -0,0 +1,125 @@
+---
+hide-toc: true
+html_theme.sidebar_secondary.remove: true
+---
+
+<!-- Hero banner: the original HTML markup was mangled in extraction; only the
+     recoverable text content is kept below. -->
+
+**Real-Time AI without the fuss.**
+
+Kaskada is a next-generation streaming engine that connects AI models to real-time & historical data.
+
+
+# Kaskada completes the Real-Time AI stack, providing...
+
+```{gallery-grid}
+:grid-columns: 1 2 2 3
+
+- header: "{fas}`timeline;pst-color-primary` Real-time Aggregation"
+  content: "Precompute model inputs from streaming data with robust data connectors, transformations & aggregations."
+- header: "{fas}`binoculars;pst-color-primary` Event Detection"
+  content: "Trigger pro-active AI behaviors by identifying important activities, as they happen."
+- header: "{fas}`backward;pst-color-primary` History Replay"
+  content: "Backtest and fine-tune from historical data using per-example time travel and point-in-time joins."
+```
+
+
+## Real-time AI in minutes
+
+Connect and compute over databases, streaming data, _and_ data loaded dynamically using Python.
+Kaskada is seamlessly integrated with Python's ecosystem of AI/ML tooling so you can load data, process it, train and serve models all in the same place.
+
+There's no infrastructure to provision (and no JVM hiding under the covers), so you can jump right in - check out the [Quick Start](quickstart).
+
+
+## Built for scale and reliability
+
+Implemented in [Rust](https://www.rust-lang.org/) using [Apache Arrow](https://arrow.apache.org/), Kaskada's compute engine uses columnar data to efficiently execute large historic and high-throughput streaming queries.
+Every operation in Kaskada is implemented incrementally, allowing automatic recovery if the process is terminated or killed.
+
+With Kaskada, most jobs are fast enough to run locally, so it's easy to build and test your real-time queries.
+As your needs grow, Kaskada's cloud-native design and support for partitioned execution give you the volume and throughput you need to scale.
+Kaskada was built by core contributors to [Apache Beam](https://beam.apache.org/), [Google Cloud Dataflow](https://cloud.google.com/dataflow), and [Apache Cassandra](https://cassandra.apache.org/), and is under active development.
+
+* * *
+
+## Example Real-Time App: BeepGPT
+
+[BeepGPT](https://github.com/kaskada-ai/beep-gpt/tree/main) keeps you in the loop without disturbing your focus. Its personalized, intelligent AI continuously monitors your Slack workspace, alerting you to important conversations and freeing you to concentrate on what’s most important.
+
+The core of BeepGPT's real-time processing requires only a few lines of code using Kaskada:
+
+```python
+import kaskada as kd
+import openai
+import pyarrow.parquet
+
+kd.init_session()
+
+# Bootstrap from historical data
+messages = kd.sources.PyList(
+    rows = pyarrow.parquet.read_table("./messages.parquet")
+        .to_pylist(),
+    time_column_name = "ts",
+    key_column_name = "channel",
+)
+
+# Send each Slack message to Kaskada
+# (`slack` is a connected Slack socket-mode client)
+def handle_message(client, req):
+    messages.add_rows(req.payload["event"])
+slack.socket_mode_request_listeners.append(handle_message)
+slack.connect()
+
+# Aggregate multiple messages into a "conversation"
+conversations = ( messages
+    .select("user", "text")
+    .collect(max=20)
+)
+
+# Handle each conversation as it occurs
+async for row in conversations.run(materialize=True).iter_rows_async():
+
+    # Use a pre-trained model to identify interested users
+    prompt = "\n\n".join([f' {msg["user"]} --> {msg["text"]} ' for msg in row["result"]])
+    res = openai.Completion.create(
+        model="davinci:ft-personal:coversation-users-full-kaskada-2023-08-05-14-25-30",
+        prompt=prompt + "\n\n###\n\n",
+        logprobs=5,
+        max_tokens=1,
+        stop=" end",
+        temperature=0.25,
+    )
+
+    # Notify interested users using the Slack API
+    for user_id in interested_users(res):
+        notify_user(row, user_id)
+```
+
+For more details, check out the [BeepGPT Github project](https://github.com/kaskada-ai/beep-gpt).
+
+* * *
+
+## Get Started
+
+Getting started with Kaskada is a `pip install kaskada` away.
+Check out the [Quick Start](quickstart) now!
+
+```{toctree}
+:hidden:
+:maxdepth: 3
+
+quickstart
+why
+tour
+examples/index
+guide/index
+```
+
+```{toctree}
+:caption: Reference
+:hidden:
+:maxdepth: 3
+
+reference/timestream/index
+reference/windows
+reference/sources
+reference/results
+```
diff --git a/python/docs/source/quickstart.md b/python/docs/source/quickstart.md
new file mode 100644
index 000000000..95e019f51
--- /dev/null
+++ b/python/docs/source/quickstart.md
@@ -0,0 +1,33 @@
+---
+file_format: mystnb
+kernelspec:
+  name: python3
+  display_name: Python 3
+mystnb:
+  execution_mode: cache
+---
+
+# Quick Start
+
+```{todo}
+
+Write the quick start.
+```
+
+```{code-cell}
+import kaskada as kd
+kd.init_session()
+content = "\n".join(
+    [
+        "time,key,m,n",
+        "1996-12-19T16:39:57,A,5,10",
+        "1996-12-19T16:39:58,B,24,3",
+        "1996-12-19T16:39:59,A,17,6",
+        "1996-12-19T16:40:00,A,,9",
+        "1996-12-19T16:40:01,A,12,",
+        "1996-12-19T16:40:02,A,,",
+    ]
+)
+source = kd.sources.CsvString(content, time_column_name="time", key_column_name="key")
+source.run().to_pandas()
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/results.md b/python/docs/source/reference/results.md
new file mode 100644
index 000000000..36ee7970d
--- /dev/null
+++ b/python/docs/source/reference/results.md
@@ -0,0 +1,10 @@
+# Results
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: apidocs/
+
+   Result
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/sources.md b/python/docs/source/reference/sources.md
new file mode 100644
index 000000000..245648d1c
--- /dev/null
+++ b/python/docs/source/reference/sources.md
@@ -0,0 +1,16 @@
+# Sources
+
+```{eval-rst}
+
+.. automodule:: kaskada.sources
+
+   .. autosummary::
+      :toctree: apidocs/sources
+
+      Source
+      CsvString
+      JsonlString
+      Pandas
+      Parquet
+      PyList
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/aggregation.md b/python/docs/source/reference/timestream/aggregation.md
new file mode 100644
index 000000000..3f5740fce
--- /dev/null
+++ b/python/docs/source/reference/timestream/aggregation.md
@@ -0,0 +1,29 @@
+# Aggregation
+
+Timestream aggregations are:
+
+Cumulative:
+  They reflect all values up to and including the current time.
+Grouped:
+  They reflect the values for each entity separately.
+Windowed:
+  They reflect the values within a specific [window](../windows.md).
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.collect
+   Timestream.count
+   Timestream.count_if
+   Timestream.first
+   Timestream.last
+   Timestream.max
+   Timestream.mean
+   Timestream.min
+   Timestream.stddev
+   Timestream.sum
+   Timestream.variance
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/arithmetic.md b/python/docs/source/reference/timestream/arithmetic.md
new file mode 100644
index 000000000..6a0d2109e
--- /dev/null
+++ b/python/docs/source/reference/timestream/arithmetic.md
@@ -0,0 +1,22 @@
+# Arithmetic
+
+Timestreams support a variety of arithmetic operations.
+
+```{note}
+In addition to the chainable methods, standard operators are implemented where appropriate.
+For instance, `a.add(b)` may be written as `a + b`.
+See the notes on the specific functions for more information.
+```
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.add
+   Timestream.sub
+   Timestream.mul
+   Timestream.div
+   Timestream.neg
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/collection.md b/python/docs/source/reference/timestream/collection.md
new file mode 100644
index 000000000..99e579692
--- /dev/null
+++ b/python/docs/source/reference/timestream/collection.md
@@ -0,0 +1,16 @@
+# Collection
+
+Timestreams allow each point to contain a collection -- a `list` or `map` -- of elements.
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.__getitem__
+   Timestream.flatten
+   Timestream.index
+   Timestream.length
+   Timestream.union
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/comparison.md b/python/docs/source/reference/timestream/comparison.md
new file mode 100644
index 000000000..d25e3d28f
--- /dev/null
+++ b/python/docs/source/reference/timestream/comparison.md
@@ -0,0 +1,27 @@
+# Comparison
+
+Comparison operations produce boolean Timestreams.
+
+```{note}
+In addition to the chainable methods, standard operators are implemented where appropriate.
+For instance, `a.ge(b)` may be written as `a >= b`.
+See the notes on the specific functions for more information.
+
+To respect the semantics of `__eq__` and `__ne__`, `a == b` and `a != b` are *not* overloaded.
+```
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.eq
+   Timestream.ge
+   Timestream.gt
+   Timestream.le
+   Timestream.lt
+   Timestream.ne
+   Timestream.is_null
+   Timestream.is_not_null
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/execution.md b/python/docs/source/reference/timestream/execution.md
new file mode 100644
index 000000000..87f06ea92
--- /dev/null
+++ b/python/docs/source/reference/timestream/execution.md
@@ -0,0 +1,11 @@
+# Execution
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.preview
+   Timestream.run
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/grouping.md b/python/docs/source/reference/timestream/grouping.md
new file mode 100644
index 000000000..47575a856
--- /dev/null
+++ b/python/docs/source/reference/timestream/grouping.md
@@ -0,0 +1,11 @@
+# Grouping
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.lookup
+   Timestream.with_key
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/index.md b/python/docs/source/reference/timestream/index.md
new file mode 100644
index 000000000..04c5e37bf
--- /dev/null
+++ b/python/docs/source/reference/timestream/index.md
@@ -0,0 +1,28 @@
+---
+html_theme.sidebar_secondary.remove:
+---
+
+# Timestream
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autoclass:: kaskada.Literal
+.. autoclass:: kaskada.Timestream
+   :exclude-members: __init__
+```
+
+```{toctree}
+:hidden:
+
+aggregation
+arithmetic
+collection
+comparison
+execution
+grouping
+logical
+misc
+records
+time
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/logical.md b/python/docs/source/reference/timestream/logical.md
new file mode 100644
index 000000000..e2eeb9927
--- /dev/null
+++ b/python/docs/source/reference/timestream/logical.md
@@ -0,0 +1,12 @@
+# Logical
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.and_
+   Timestream.or_
+   Timestream.not_
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/misc.md b/python/docs/source/reference/timestream/misc.md
new file mode 100644
index 000000000..bfd18a25a
--- /dev/null
+++ b/python/docs/source/reference/timestream/misc.md
@@ -0,0 +1,16 @@
+# Miscellaneous
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.cast
+   Timestream.data_type
+   Timestream.else_
+   Timestream.filter
+   Timestream.if_
+   Timestream.lag
+   Timestream.null_if
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/records.md b/python/docs/source/reference/timestream/records.md
new file mode 100644
index 000000000..7fe5b10c1
--- /dev/null
+++ b/python/docs/source/reference/timestream/records.md
@@ -0,0 +1,18 @@
+# Records
+
+Record operations create, extract, or manipulate Timestreams of records.
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.col
+   Timestream.select
+   Timestream.remove
+   Timestream.extend
+   Timestream.record
+   record
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/timestream/time.md b/python/docs/source/reference/timestream/time.md
new file mode 100644
index 000000000..a1e2e3aab
--- /dev/null
+++ b/python/docs/source/reference/timestream/time.md
@@ -0,0 +1,15 @@
+# Time
+
+```{eval-rst}
+.. currentmodule:: kaskada
+
+.. autosummary::
+   :toctree: ../apidocs/
+
+   Timestream.shift_by
+   Timestream.shift_to
+   Timestream.shift_until
+   Timestream.time
+   Timestream.seconds_since
+   Timestream.seconds_since_previous
+```
\ No newline at end of file
diff --git a/python/docs/source/reference/windows.md b/python/docs/source/reference/windows.md
new file mode 100644
index 000000000..007f26a12
--- /dev/null
+++ b/python/docs/source/reference/windows.md
@@ -0,0 +1,12 @@
+# Windows
+
+```{eval-rst}
+.. currentmodule:: kaskada.windows
+
+.. autosummary::
+   :toctree: apidocs/windows/
+
+   Since
+   Sliding
+   Trailing
+```
\ No newline at end of file
diff --git a/python/docs/source/tour.md b/python/docs/source/tour.md
new file mode 100644
index 000000000..50296b82e
--- /dev/null
+++ b/python/docs/source/tour.md
@@ -0,0 +1,123 @@
+---
+file_format: mystnb
+kernelspec:
+  name: python3
+  display_name: Python 3
+mystnb:
+  execution_mode: cache
+---
+
+% Level: Beginner
+% Goal: Overview of the key features of Kaskada focused on explaining *why* you want them.
+% Audience: Someone who has read the landing page and wants to understand what Kaskada can do for them.
+
+# Tour of Kaskada
+
+This page provides an overview of the key features of Kaskada that enable feature engineering on event-based data.
+The [Quick Start](quickstart) has details on how you can quickly get started running Kaskada queries.
+For a more complete explanation, see the User Guide.
+
+This tour uses Kaskada and Plotly to render the illustrations.
+The initial setup / data is below.
+
+```{code-cell}
+---
+tags: [hide-cell]
+---
+import kaskada as kd
+kd.init_session()
+single_entity = "\n".join(
+    [
+        "time,key,m,n",
+        "1996-12-19T16:39:57,A,5,10",
+        "1996-12-20T16:39:59,A,17,6",
+        "1996-12-22T16:40:00,A,,9",
+        "1996-12-23T16:40:01,A,12,",
+        "1996-12-24T16:40:02,A,,",
+    ]
+)
+single_entity = kd.sources.CsvString(single_entity, time_column_name="time", key_column_name="key")
+```
+
+## Events and Aggregations
+
+Every Kaskada query operates on one or more _sources_ containing events.
+Every event in a source happens at a specific point in time and relates to a specific entity.
+A source contains events with the same schema.
+Often, each source represents a specific kind of event, such as a login event or purchase.
+
+It is often convenient to picture temporal data as a sequence of timestamped events.
+A natural question to ask about such events is the total--or `sum`--of some value; here, the sum of the column `m`.
+This is accomplished by _aggregating_ the events.
+The results of an aggregation change over time as additional events occur.
+
+```{code-cell}
+---
+tags: [remove-input]
+---
+kd.plot.render(
+    kd.plot.Plot(single_entity.col("m"), name="m"),
+    kd.plot.Plot(single_entity.col("m").sum(), name="sum of m")
+)
+```
+
+The User Guide has [more details on aggregation](guide/aggregation.md), including how to use windows to control which events are aggregated.
+
+## Discrete and Continuous
+We say that events (and values derived from them) are _discrete_ because they occur at specific points in time,
+and the results of the aggregation are [_continuous_](guide/timestreams.md#continuity).
+In the example, after the second event (with `m` of 17) the sum was 22.
+And it _continued_ to be 22 at every point in time until the next event carrying a value of `m`, which brought the sum to 34.
+A continuous value is inclusive of the event that causes the value to change and exclusive of the next change.
+
+Thus, an aggregation at a given point in time reflects all events that have happened up to (and including) that point in time.
+The concept of continuity applies to many other operations in Kaskada, not just aggregations.
+This is part of what we mean when we say that Kaskada is a temporal query language.
+
+## Grouping
+Another property of Kaskada is that events are implicitly grouped by _entity_.
+In the previous example, all events belonged to the same entity, `A`.
+When events come from multiple users, there is a natural grouping for the aggregation.
+When computing a machine learning feature such as "total purchases", we usually wish to aggregate the events related to a specific user or entity.
+
+One way to understand this grouping is as a separate stream associated with each entity.
+The stream of events for each user may be shown separately, or it may be pictured flattened into a single stream keyed by user.
+The idea of grouped streams as separate, per-entity streams is often useful for understanding the behavior of Kaskada Timestreams.
+
+```{todo}
+Add example of multiple entity aggregation.
+```
+
+The User Guide has [more details on grouping](guide/entities.md), including how to change the grouping of a Timestream.
+
+## History and Snapshots
+
+Since the Timestream describes how values are computed at every point in time, there are several useful ways they may be output.
+
+For training a model, it is often useful to output historic values matching some `filter`.
+These historic points can then be used as training examples, allowing the model to be trained on past points.
+This historic output is also useful for visualizing a Timestream at multiple points.
+
+For serving a model, it is often useful to output the value of a Timestream for every entity at a specific point in time.
+This is most often used to output a snapshot at the current time.
+
+For both kinds of output, it is also useful to be able to select only the points after a specific time.
+This would filter out points from the history, or limit the snapshot to only those entities which have changed.
+
+## Windowed Aggregation
+
+```{todo}
+Update to reflect actual syntax. Include example.
+```
+
+In addition to the default behavior of aggregating over all events up to a given time, aggregations may be performed over specific windows.
+For example, `hourly()` describes periodic windows of an hour.
+The aggregation `sum(Purchases, window=hourly())` would produce the cumulative sum of purchases made since the beginning of the hour.
+For example, if there were purchases at 8:45 AM, 9:15 AM, 9:25 AM, and 10:02 AM, then the result at 9:25 AM is the sum from 9:00 AM to 9:25 AM, which would include only the events at 9:15 AM and 9:25 AM.
+
+A non-cumulative windowed aggregation produces values only at the end of a window.
+For instance, `sum(Purchases, window=hourly(), cumulative=false)` will produce the sum for the past hour.
+With the purchases in the previous example, this would mean that at 9:00 AM an event is produced containing the amount of the purchase at 8:45 AM, and at 10:00 AM an event is produced containing the sum of the purchases at 9:15 AM and 9:25 AM.
+A window must be specified when using a non-cumulative aggregation.
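+
+The exact Python syntax for windows is still settling (see the todo above). As a rough sketch with a hypothetical `purchases` Timestream, assuming `kd.windows.Trailing` accepts a `datetime.timedelta` (the available windows are listed in the [Windows reference](reference/windows.md)):
+
+```python
+import kaskada as kd
+from datetime import timedelta
+
+# Sum of purchase amounts over the trailing hour; `Trailing` is one of
+# the windows exposed by `kaskada.windows` while the API is in alpha.
+trailing_sum = purchases.col("amount").sum(window=kd.windows.Trailing(timedelta(hours=1)))
+```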
+
+The User Guide [on Aggregation](guide/aggregation.md#windowing) has more information on windowing.
\ No newline at end of file
diff --git a/python/docs/source/why.md b/python/docs/source/why.md
new file mode 100644
index 000000000..ff2e17b6d
--- /dev/null
+++ b/python/docs/source/why.md
@@ -0,0 +1,15 @@
+# Why Kaskada?
+
+Kaskada is a library for executing temporal queries over event-based data.
+An "event" can be any fact about the world associated with a time.
+For example, a user signing up for a service, or a customer purchasing a product.
+As additional events occur, the answers computed from those events may change as well.
+
+Traditional data processing systems are designed to answer questions about the current state of a dataset.
+For instance, "how many purchases has a given user made?"
+Over time, the user makes additional purchases and the answer *should* change.
+With these traditional data processing systems, the answer depends on when the question is asked.
+
+With Kaskada, the query "how many purchases has a given user made?" is expressed as a _Timestream_.
+This represents how the result of that query changes over time for each user.
+Kaskada makes it easy to combine Timestreams to produce a new Timestream -- joining points from each input as needed.
\ No newline at end of file
diff --git a/python/mypy.ini b/python/mypy.ini
new file mode 100644
index 000000000..07bbced46
--- /dev/null
+++ b/python/mypy.ini
@@ -0,0 +1,15 @@
+[mypy]
+
+[mypy-desert,marshmallow,nox.*,pytest,pytest_mock,_pytest.*]
+ignore_missing_imports = True
+
+# pyarrow doesn't currently expose mypy stubs:
+#
+# - https://github.com/apache/arrow/issues/32609
+# - https://github.com/apache/arrow/issues/33113
+# - https://github.com/apache/arrow/issues/36113
+[mypy-pyarrow.*]
+ignore_missing_imports = True
+
+[mypy-plotly.*]
+ignore_missing_imports = True
\ No newline at end of file
diff --git a/python/noxfile.py b/python/noxfile.py
new file mode 100644
index 000000000..906d93967
--- /dev/null
+++ b/python/noxfile.py
@@ -0,0 +1,217 @@
+"""Nox sessions."""
+import os
+import shutil
+import sys
+from pathlib import Path
+from typing import Iterable
+from typing import Iterator
+
+import nox
+
+package = "kaskada"
+python_versions = ["3.11", "3.10", "3.9", "3.8"]
+nox.needs_version = ">= 2021.6.6"
+nox.options.sessions = (
+    "check-lint",
+    "safety",
+    "mypy",
+    "tests",
+    "typeguard",
+    "xdoctest",
+    "docs-build",
+)
+
+
+@nox.session(name="check-lint", python=python_versions[0])
+def check_lint(session: nox.Session) -> None:
+    """Lint."""
+    args = session.posargs or ["pysrc", "pytests", "docs/source"]
+    install(session, groups=["lint"], root=False)
+    session.run("black", "--check", *args)
+    session.run("flake8", *args)
+    session.run("isort", "--filter-files", "--check-only", *args)
+
+    # Only do darglint and pydocstyle on pysrc (source)
+    session.run("darglint", "pysrc")
+    session.run("pydocstyle", "--convention=numpy", "pysrc")
+    # No way to run this as a check.
+ # session.run("pyupgrade", "--py38-plus") + + +@nox.session(name="fix-lint", python=python_versions[0]) +def fix_lint(session: nox.Session) -> None: + """Automatically fix lint issues.""" + args = session.posargs or ["pysrc", "pytests", "docs/source"] + install(session, groups=["lint"], root=False) + session.run("black", *args) + session.run("autoflake", "--in-place", "--remove-all-unused-imports", "--recursive", *args) + session.run("isort", "--filter-files", *args) + session.run("pyupgrade", "--py38-plus") + + +@nox.session(python=python_versions[0]) +def safety(session: nox.Session) -> None: + """Scan dependencies for insecure packages.""" + # NOTE: Pass `extras` to `export_requirements` if the project supports any. + requirements = export_requirements(session) + install(session, groups=["safety"], root=False) + session.run("safety", "check", "--full-report", f"--file={requirements}") + + +@nox.session(python=python_versions) +def mypy(session: nox.Session) -> None: + """Type-check using mypy.""" + args = session.posargs or ["pysrc", "pytests"] + install(session, groups=["typecheck"]) + # Using `--install-types` should make this less picky about missing stubs. + # However, there is a possibility it slows things down, by making mypy + # run twice -- once to determine what types need to be installed, then once + # to check things with those stubs. + session.run("mypy", "--install-types", "--non-interactive", *args) + if not session.posargs: + session.run("mypy", f"--python-executable={sys.executable}", "noxfile.py") + + +@nox.session(python=python_versions) +def tests(session: nox.Session) -> None: + """Run the test suite.""" + install(session, groups=["test"]) + try: + session.run("coverage", "run", "--parallel", "-m", "pytest", *session.posargs) + finally: + if session.interactive: + session.notify("coverage", posargs=[]) + + +@nox.session(python=python_versions[0]) +def coverage(session: nox.Session) -> None: + """Produce the coverage report.""" + args = session.posargs or ["report"] + + install(session, groups=["test"]) + + if not session.posargs and any(Path().glob(".coverage.*")): + session.run("coverage", "combine") + + session.run("coverage", *args) + + +@nox.session(python=python_versions[0]) +def typeguard(session: nox.Session) -> None: + """Runtime type checking using Typeguard.""" + install(session, groups=["typecheck", "test"]) + session.run("pytest", f"--typeguard-packages={package}", *session.posargs) + + +@nox.session(python=python_versions) +def xdoctest(session: nox.Session) -> None: + """Run examples with xdoctest.""" + if session.posargs: + args = [package, *session.posargs] + else: + args = [f"--modname={package}", "--command=all"] + if "FORCE_COLOR" in os.environ: + args.append("--colored=1") + + install(session, groups=["test"]) + session.run("python", "-m", "xdoctest", *args) + +@nox.session(name="docs-build", python=python_versions[0]) +def docs_build(session: nox.Session) -> None: + """Build the documentation.""" + args = session.posargs or ["docs/source", "docs/_build", "-j", "auto", "-W"] + if not session.posargs and "FORCE_COLOR" in os.environ: + args.insert(0, "--color") + + install(session, groups=["typecheck", "docs"]) + + build_dir = Path("docs", "_build") + if build_dir.exists(): + shutil.rmtree(build_dir) + + session.run("sphinx-build", *args) + + +@nox.session(python=python_versions[0]) +def docs(session: nox.Session) -> None: + """Build and serve the documentation with live reloading on file changes.""" + args = ["--open-browser", "docs/source", 
"docs/_build", "-j", "auto", "--ignore", "*/apidocs/*"] + install(session, groups=["typecheck", "docs"]) + + build_dir = Path("docs", "_build") + if build_dir.exists(): + shutil.rmtree(build_dir) + + session.run("sphinx-autobuild", *args) + + +def install(session: nox.Session, *, groups: Iterable[str], root: bool = True) -> None: + """Install the dependency groups using Poetry. + This function installs the given dependency groups into the session's + virtual environment. When ``root`` is true (the default), the function + also installs the root package's default dependencies. + + The root package is installed using `maturin develop`. + + Args: + session: The Session object. + groups: The dependency groups to install. + root: Install the root package. + """ + session.run_always( + "poetry", + "install", + "--no-root", + "--sync", + "--{}={}".format("only" if not root else "with", ",".join(groups)), + external=True, + ) + if root: + session.run_always("maturin", "develop", "--profile", "dev") + + +def export_requirements(session: nox.Session, *, extras: Iterable[str] = ()) -> Path: + """Export a requirements file from Poetry. + This function uses ``poetry export`` to generate a requirements file + containing the default dependencies at the versions specified in + ``poetry.lock``. + + Args: + session: The Session object. + extras: Extras supported by the project. + Returns: + The path to the requirements file. + """ + # XXX Use poetry-export-plugin with dependency groups + output = session.run_always( + "poetry", + "export", + "--format=requirements.txt", + "--without-hashes", + *[f"--extras={extra}" for extra in extras], + external=True, + silent=True, + stderr=None, + ) + + if output is None: + session.skip( + "The command `poetry export` was not executed" + " (a possible cause is specifying `--no-install`)" + ) + + assert isinstance(output, str) # noqa: S101 + + def _stripwarnings(lines: Iterable[str]) -> Iterator[str]: + for line in lines: + if line.startswith("Warning:"): + print(line, file=sys.stderr) + continue + yield line + + text = "".join(_stripwarnings(output.splitlines(keepends=True))) + + path = session.cache_dir / "requirements.txt" + path.write_text(text) + + return path \ No newline at end of file diff --git a/python/poetry.lock b/python/poetry.lock new file mode 100644 index 000000000..c8ac8fbf6 --- /dev/null +++ b/python/poetry.lock @@ -0,0 +1,3170 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
+ +[[package]] +name = "accessible-pygments" +version = "0.0.4" +description = "A collection of accessible pygments styles" +optional = false +python-versions = "*" +files = [ + {file = "accessible-pygments-0.0.4.tar.gz", hash = "sha256:e7b57a9b15958e9601c7e9eb07a440c813283545a20973f2574a5f453d0e953e"}, + {file = "accessible_pygments-0.0.4-py2.py3-none-any.whl", hash = "sha256:416c6d8c1ea1c5ad8701903a20fcedf953c6e720d64f33dc47bfb2d3f2fa4e8d"}, +] + +[package.dependencies] +pygments = ">=1.5" + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = "*" +files = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] + +[[package]] +name = "argcomplete" +version = "3.1.1" +description = "Bash tab completion for argparse" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argcomplete-3.1.1-py3-none-any.whl", hash = "sha256:35fa893a88deea85ea7b20d241100e64516d6af6d7b0ae2bed1d263d26f70948"}, + {file = "argcomplete-3.1.1.tar.gz", hash = "sha256:6c4c563f14f01440aaffa3eae13441c5db2357b5eec639abe7c0b15334627dff"}, +] + +[package.extras] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] + +[[package]] +name = "asttokens" +version = "2.2.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, + {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, +] + +[package.dependencies] +six = "*" + +[package.extras] +test = ["astroid", "pytest"] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "autoflake" +version = "2.2.0" +description = "Removes unused imports and unused variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "autoflake-2.2.0-py3-none-any.whl", hash = "sha256:de409b009a34c1c2a7cc2aae84c4c05047f9773594317c6a6968bd497600d4a0"}, + {file = "autoflake-2.2.0.tar.gz", hash = 
"sha256:62e1f74a0fdad898a96fee6f99fe8241af90ad99c7110c884b35855778412251"}, +] + +[package.dependencies] +pyflakes = ">=3.0.0" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + +[[package]] +name = "babel" +version = "2.12.1" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +optional = false +python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.2" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "23.7.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = 
"sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + 
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + 
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.2.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = 
"charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "colorlog" +version = "6.7.0" +description = "Add colours to the output of Python's logging module." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.7.0-py2.py3-none-any.whl", hash = "sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662"}, + {file = "colorlog-6.7.0.tar.gz", hash = "sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "comm" +version = "0.1.4" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +optional = false +python-versions = ">=3.6" +files = [ + {file = "comm-0.1.4-py3-none-any.whl", hash = "sha256:6d52794cba11b36ed9860999cd10fd02d6b2eac177068fdd585e1e2f8a96e67a"}, + {file = "comm-0.1.4.tar.gz", hash = "sha256:354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"] +test = ["pytest"] +typing = ["mypy (>=0.990)"] + +[[package]] +name = "coverage" +version = "7.3.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db76a1bcb51f02b2007adacbed4c88b6dee75342c37b05d1822815eed19edee5"}, + {file = "coverage-7.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c02cfa6c36144ab334d556989406837336c1d05215a9bdf44c0bc1d1ac1cb637"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c9430ad5d1b80b07f3c12f7120eef40bfbf849e9e7859e53b9c93b922d2af"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2ee86ca75f9f96072295c5ebb4ef2a43cecf2870b0ca5e7a1cbdd929cf67e1"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68d8a0426b49c053013e631c0cdc09b952d857efa8f68121746b339912d27a12"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3eb0c93e2ea6445b2173da48cb548364f8f65bf68f3d090404080d338e3a689"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:90b6e2f0f66750c5a1178ffa9370dec6c508a8ca5265c42fbad3ccac210a7977"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96d7d761aea65b291a98c84e1250cd57b5b51726821a6f2f8df65db89363be51"}, + {file = "coverage-7.3.0-cp310-cp310-win32.whl", hash = "sha256:63c5b8ecbc3b3d5eb3a9d873dec60afc0cd5ff9d9f1c75981d8c31cfe4df8527"}, + {file = "coverage-7.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:97c44f4ee13bce914272589b6b41165bbb650e48fdb7bd5493a38bde8de730a1"}, + {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"}, + {file = "coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"}, + 
{file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"}, + {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"}, + {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"}, + {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"}, + {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac0dec90e7de0087d3d95fa0533e1d2d722dcc008bc7b60e1143402a04c117c1"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:438856d3f8f1e27f8e79b5410ae56650732a0dcfa94e756df88c7e2d24851fcd"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1084393c6bda8875c05e04fce5cfe1301a425f758eb012f010eab586f1f3905e"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49ab200acf891e3dde19e5aa4b0f35d12d8b4bd805dc0be8792270c71bd56c54"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67e6bbe756ed458646e1ef2b0778591ed4d1fcd4b146fc3ba2feb1a7afd4254"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f39c49faf5344af36042b293ce05c0d9004270d811c7080610b3e713251c9b0"}, + {file = 
"coverage-7.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7df91fb24c2edaabec4e0eee512ff3bc6ec20eb8dccac2e77001c1fe516c0c84"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:34f9f0763d5fa3035a315b69b428fe9c34d4fc2f615262d6be3d3bf3882fb985"}, + {file = "coverage-7.3.0-cp38-cp38-win32.whl", hash = "sha256:bac329371d4c0d456e8d5f38a9b0816b446581b5f278474e416ea0c68c47dcd9"}, + {file = "coverage-7.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b859128a093f135b556b4765658d5d2e758e1fae3e7cc2f8c10f26fe7005e543"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed8d310afe013db1eedd37176d0839dc66c96bcfcce8f6607a73ffea2d6ba"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61260ec93f99f2c2d93d264b564ba912bec502f679793c56f678ba5251f0393"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97af9554a799bd7c58c0179cc8dbf14aa7ab50e1fd5fa73f90b9b7215874ba28"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3558e5b574d62f9c46b76120a5c7c16c4612dc2644c3d48a9f4064a705eaee95"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37d5576d35fcb765fca05654f66aa71e2808d4237d026e64ac8b397ffa66a56a"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07ea61bcb179f8f05ffd804d2732b09d23a1238642bf7e51dad62082b5019b34"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:80501d1b2270d7e8daf1b64b895745c3e234289e00d5f0e30923e706f110334e"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eddd3153d02204f22aef0825409091a91bf2a20bce06fe0f638f5c19a85de54"}, + {file = "coverage-7.3.0-cp39-cp39-win32.whl", hash = "sha256:2d22172f938455c156e9af2612650f26cceea47dc86ca048fa4e0b2d21646ad3"}, + {file = "coverage-7.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:60f64e2007c9144375dd0f480a54d6070f00bb1a28f65c408370544091c9bc9e"}, + {file = "coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"}, + {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "darglint" +version = "1.8.1" +description = "A utility for ensuring Google-style docstrings stay up to date with the source code." 
+optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"}, + {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, +] + +[[package]] +name = "debugpy" +version = "1.6.7.post1" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "debugpy-1.6.7.post1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:903bd61d5eb433b6c25b48eae5e23821d4c1a19e25c9610205f5aeaccae64e32"}, + {file = "debugpy-1.6.7.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16882030860081e7dd5aa619f30dec3c2f9a421e69861125f83cc372c94e57d"}, + {file = "debugpy-1.6.7.post1-cp310-cp310-win32.whl", hash = "sha256:eea8d8cfb9965ac41b99a61f8e755a8f50e9a20330938ad8271530210f54e09c"}, + {file = "debugpy-1.6.7.post1-cp310-cp310-win_amd64.whl", hash = "sha256:85969d864c45f70c3996067cfa76a319bae749b04171f2cdeceebe4add316155"}, + {file = "debugpy-1.6.7.post1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:890f7ab9a683886a0f185786ffbda3b46495c4b929dab083b8c79d6825832a52"}, + {file = "debugpy-1.6.7.post1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4ac7a4dba28801d184b7fc0e024da2635ca87d8b0a825c6087bb5168e3c0d28"}, + {file = "debugpy-1.6.7.post1-cp37-cp37m-win32.whl", hash = "sha256:3370ef1b9951d15799ef7af41f8174194f3482ee689988379763ef61a5456426"}, + {file = "debugpy-1.6.7.post1-cp37-cp37m-win_amd64.whl", hash = "sha256:65b28435a17cba4c09e739621173ff90c515f7b9e8ea469b92e3c28ef8e5cdfb"}, + {file = "debugpy-1.6.7.post1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:92b6dae8bfbd497c90596bbb69089acf7954164aea3228a99d7e43e5267f5b36"}, + {file = "debugpy-1.6.7.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72f5d2ecead8125cf669e62784ef1e6300f4067b0f14d9f95ee00ae06fc7c4f7"}, + {file = "debugpy-1.6.7.post1-cp38-cp38-win32.whl", hash = "sha256:f0851403030f3975d6e2eaa4abf73232ab90b98f041e3c09ba33be2beda43fcf"}, + {file = "debugpy-1.6.7.post1-cp38-cp38-win_amd64.whl", hash = "sha256:3de5d0f97c425dc49bce4293df6a04494309eedadd2b52c22e58d95107e178d9"}, + {file = "debugpy-1.6.7.post1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38651c3639a4e8bbf0ca7e52d799f6abd07d622a193c406be375da4d510d968d"}, + {file = "debugpy-1.6.7.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038c51268367c9c935905a90b1c2d2dbfe304037c27ba9d19fe7409f8cdc710c"}, + {file = "debugpy-1.6.7.post1-cp39-cp39-win32.whl", hash = "sha256:4b9eba71c290852f959d2cf8a03af28afd3ca639ad374d393d53d367f7f685b2"}, + {file = "debugpy-1.6.7.post1-cp39-cp39-win_amd64.whl", hash = "sha256:973a97ed3b434eab0f792719a484566c35328196540676685c975651266fccf9"}, + {file = "debugpy-1.6.7.post1-py2.py3-none-any.whl", hash = "sha256:1093a5c541af079c13ac8c70ab8b24d1d35c8cacb676306cf11e57f699c02926"}, + {file = "debugpy-1.6.7.post1.zip", hash = "sha256:fe87ec0182ef624855d05e6ed7e0b7cb1359d2ffa2a925f8ec2d22e98b75d0ca"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = 
"sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "distlib" +version = "0.3.7" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + +[[package]] +name = "docutils" +version = "0.19" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, +] + +[[package]] +name = "dotty-dict" +version = "1.3.1" +description = "Dictionary wrapper for quick access to deeply nested keys." +optional = false +python-versions = ">=3.5,<4.0" +files = [ + {file = "dotty_dict-1.3.1-py3-none-any.whl", hash = "sha256:5022d234d9922f13aa711b4950372a06a6d64cb6d6db9ba43d0ba133ebfce31f"}, + {file = "dotty_dict-1.3.1.tar.gz", hash = "sha256:4b016e03b8ae265539757a53eba24b9bfda506fb94fbce0bee843c6f05541a15"}, +] + +[[package]] +name = "dparse" +version = "0.6.3" +description = "A parser for Python dependency files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "dparse-0.6.3-py3-none-any.whl", hash = "sha256:0d8fe18714056ca632d98b24fbfc4e9791d4e47065285ab486182288813a5318"}, + {file = "dparse-0.6.3.tar.gz", hash = "sha256:27bb8b4bcaefec3997697ba3f6e06b2447200ba273c0b085c3d012a04571b528"}, +] + +[package.dependencies] +packaging = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +conda = ["pyyaml"] +pipenv = ["pipenv (<=2022.12.19)"] + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "1.2.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = "*" +files = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] + +[package.extras] +tests = ["asttokens", "littleutils", "pytest", "rich"] + +[[package]] +name = "fastjsonschema" +version = "2.18.0" +description = "Fastest Python implementation of JSON schema" +optional = false +python-versions = "*" +files = [ + {file = "fastjsonschema-2.18.0-py3-none-any.whl", hash = "sha256:128039912a11a807068a7c87d0da36660afbfd7202780db26c4aa7153cfdc799"}, + {file = "fastjsonschema-2.18.0.tar.gz", hash = "sha256:e820349dd16f806e4bd1467a138dced9def4bc7d6213a34295272a6cac95b5bd"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "filelock" +version = 
"3.12.2" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.7" +files = [ + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, +] + +[package.extras] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "flake8-bugbear" +version = "23.7.10" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-bugbear-23.7.10.tar.gz", hash = "sha256:0ebdc7d8ec1ca8bd49347694562381f099f4de2f8ec6bda7a7dca65555d9e0d4"}, + {file = "flake8_bugbear-23.7.10-py3-none-any.whl", hash = "sha256:d99d005114020fbef47ed5e4aebafd22f167f9a0fbd0d8bf3c9e90612cb25c34"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +flake8 = ">=6.0.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] + +[[package]] +name = "flake8-rst-docstrings" +version = "0.3.0" +description = "Python docstring reStructuredText (RST) validator for flake8" +optional = false +python-versions = ">=3.7" +files = [ + {file = "flake8-rst-docstrings-0.3.0.tar.gz", hash = "sha256:d1ce22b4bd37b73cd86b8d980e946ef198cfcc18ed82fedb674ceaa2f8d1afa4"}, + {file = "flake8_rst_docstrings-0.3.0-py3-none-any.whl", hash = "sha256:f8c3c6892ff402292651c31983a38da082480ad3ba253743de52989bdc84ca1c"}, +] + +[package.dependencies] +flake8 = ">=3" +pygments = "*" +restructuredtext-lint = "*" + +[package.extras] +develop = ["build", "twine"] + +[[package]] +name = "gitdb" +version = "4.0.10" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.32" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.32-py3-none-any.whl", hash = "sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f"}, + {file = "GitPython-3.1.32.tar.gz", hash = "sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[[package]] +name = "greenlet" +version = "2.0.2" +description = "Lightweight 
in-process concurrent programming" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = 
"sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +] + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["objgraph", "psutil"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.8.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, + {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, +] + +[package.dependencies] +zipp 
= ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "5.7.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_resources-5.7.0-py3-none-any.whl", hash = "sha256:9c4c12f9ef4329a00c1f72f30bddb4f10e582766b8705980bb76356b3ba8bc91"}, + {file = "importlib_resources-5.7.0.tar.gz", hash = "sha256:f6a4a9949f36ae289facec8dac1a899a54cbaf6a135cc8552d2c8b69209c06a3"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipykernel" +version = "6.25.1" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.25.1-py3-none-any.whl", hash = "sha256:c8a2430b357073b37c76c21c52184db42f6b4b0e438e1eb7df3c4440d120497c"}, + {file = "ipykernel-6.25.1.tar.gz", hash = "sha256:050391364c0977e768e354bdb60cbbfbee7cbb943b1af1618382021136ffd42f"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=20" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.12.2" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipython-8.12.2-py3-none-any.whl", hash = "sha256:ea8801f15dfe4ffb76dea1b09b847430ffd70d827b41735c64a0638a04103bfc"}, + {file = "ipython-8.12.2.tar.gz", hash = "sha256:c7b80eb7f5a855a88efc971fda506ff7a91c280b42cdae26643e0f601ea281ea"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = 
"*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "jedi" +version = "0.19.0" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.0-py2.py3-none-any.whl", hash = "sha256:cb8ce23fbccff0025e9386b5cf85e892f94c9b822378f8da49970471335ac64e"}, + {file = "jedi-0.19.0.tar.gz", hash = "sha256:bcf9894f1753969cbac8022a8c2eaee06bfa3724e4192470aaffe7eb6272b0c4"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.19.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.19.0-py3-none-any.whl", hash = "sha256:043dc26a3845ff09d20e4420d6012a9c91c9aa8999fa184e7efcfeccb41e32cb"}, + {file = "jsonschema-4.19.0.tar.gz", hash = "sha256:6e1e7569ac13be8139b2dd2c21a55d350066ee3f80df06c608b398cdc6f30e8f"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +jsonschema-specifications = ">=2023.03.6" +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.7.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, + {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, +] + +[package.dependencies] +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +referencing = ">=0.28.0" + +[[package]] +name = "jupyter-cache" +version = "0.6.1" +description = "A defined interface for working with a cache of jupyter notebooks." 
+optional = false +python-versions = "~=3.8" +files = [ + {file = "jupyter-cache-0.6.1.tar.gz", hash = "sha256:26f83901143edf4af2f3ff5a91e2d2ad298e46e2cee03c8071d37a23a63ccbfc"}, + {file = "jupyter_cache-0.6.1-py3-none-any.whl", hash = "sha256:2fce7d4975805c77f75bdfc1bc2e82bc538b8e5b1af27f2f5e06d55b9f996a82"}, +] + +[package.dependencies] +attrs = "*" +click = "*" +importlib-metadata = "*" +nbclient = ">=0.2,<0.8" +nbformat = "*" +pyyaml = "*" +sqlalchemy = ">=1.3.12,<3" +tabulate = "*" + +[package.extras] +cli = ["click-log"] +code-style = ["pre-commit (>=2.12,<4.0)"] +rtd = ["ipykernel", "jupytext", "myst-nb", "nbdime", "sphinx-book-theme", "sphinx-copybutton"] +testing = ["coverage", "ipykernel", "jupytext", "matplotlib", "nbdime", "nbformat (>=5.1)", "numpy", "pandas", "pytest (>=6,<8)", "pytest-cov", "pytest-regressions", "sympy"] + +[[package]] +name = "jupyter-client" +version = "8.3.0" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, + {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.3.1" +description = "Jupyter core package. A base package on which Jupyter projects rely." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, + {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "livereload" +version = "2.6.3" +description = "Python LiveReload is an awesome tool for web developers" +optional = false +python-versions = "*" +files = [ + {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, + {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, +] + +[package.dependencies] +six = "*" +tornado = {version = "*", markers = "python_version > \"2.7\""} + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = 
"MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = 
"MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "maturin" +version = "1.2.3" +description = "Build and publish crates with pyo3, rust-cpython and cffi bindings as well as rust binaries as python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "maturin-1.2.3-py3-none-linux_armv6l.whl", hash = "sha256:7b6484d7c94d6d6188ccf4ed8a6167cb8f1e98f13c653bfa715c9ee9eac4be0c"}, + {file = "maturin-1.2.3-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b44fb4d1d116d69ce7c713c22b322debd5fc222db09eb1cdfa0e1c1b7f3e2e9c"}, + {file = "maturin-1.2.3-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:735375559c8c75bdc910c377f6dcc9197637ee2a312a60e361ef0e08fb31fcb5"}, + {file = "maturin-1.2.3-py3-none-manylinux_2_12_i686.manylinux2010_i686.musllinux_1_1_i686.whl", hash = "sha256:619f4f7b7e3a842a4f6cbae1d138a71d67aeba460f6217b38f2150ad53bb4dc1"}, + {file = "maturin-1.2.3-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.musllinux_1_1_x86_64.whl", hash = "sha256:4e1035c102f87aa3e6733d28c2248b7303afa11f93a21f2ac88636e0430b0258"}, + {file = "maturin-1.2.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:aef8ddb9e775dd3781e6f56e10cc3d26f648735723ab5c47ce938542b9b5bbb6"}, + {file = 
"maturin-1.2.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:23c6fdc5750b96fd10d28c125dd795e9b75cd5cd768c8a403dc91dfde641243a"}, + {file = "maturin-1.2.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.musllinux_1_1_ppc64le.whl", hash = "sha256:40b4d69f9e5be5eacedd80ae496fae67cfd71d386b5604f7ce2e9ac9d34d0460"}, + {file = "maturin-1.2.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:826e90533789ff6dd3f3f0541fbe46c3549ec985d19edceff7913f9bdf9c3131"}, + {file = "maturin-1.2.3-py3-none-win32.whl", hash = "sha256:e414e56896d904c255e80190ac81fa8299b1d7df52f7e2e3f10df33f92784fd8"}, + {file = "maturin-1.2.3-py3-none-win_amd64.whl", hash = "sha256:1f5516dbe68491bf4bf7e047caf139596a3cd9d4a5ec8bb43034980e3710e550"}, + {file = "maturin-1.2.3-py3-none-win_arm64.whl", hash = "sha256:7d47e9a0fe56d25de98a2bed7d1c75975516e3a25fa5b552b2ee61fb1add41c0"}, + {file = "maturin-1.2.3.tar.gz", hash = "sha256:ef3f42af453d64f233b99543c3001bee645019a9c2022c7972210a9cacb5301f"}, +] + +[package.dependencies] +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +patchelf = ["patchelf"] +zig = ["ziglang (>=0.10.0,<0.11.0)"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.0" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, + {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.5.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, + {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, + {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, + {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, + {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, + {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, + {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, + {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, + {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, + {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, + {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, + {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, + {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, + {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, + {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, + {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, + {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, + {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for 
programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "myst-nb" +version = "0.18.0" +description = "A Jupyter Notebook Sphinx reader built on top of the MyST markdown parser." +optional = false +python-versions = ">=3.8" +files = [] +develop = false + +[package.dependencies] +importlib_metadata = "*" +ipykernel = "*" +ipython = "*" +jupyter-cache = ">=0.5,<0.7" +myst-parser = ">=0.18.0" +nbclient = "*" +nbformat = ">=5.0,<6.0" +pyyaml = "*" +sphinx = ">=4" +typing-extensions = "*" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["alabaster", "altair", "bokeh", "coconut (>=1.4.3,<3.1.0)", "ipykernel (>=5.5,<7.0)", "ipywidgets", "jupytext (>=1.11.2,<1.15.0)", "matplotlib", "numpy", "pandas", "plotly", "sphinx-book-theme (>=0.3)", "sphinx-copybutton", "sphinx-design (>=0.4.0,<0.5.0)", "sphinxcontrib-bibtex", "sympy"] +testing = ["beautifulsoup4", "coverage (>=6.4,<8.0)", "ipykernel (>=5.5,<7.0)", "ipython (!=8.1.0,<8.15)", "ipywidgets (>=8)", "jupytext (>=1.11.2,<1.15.0)", "matplotlib (>=3.5.3,<3.6)", "nbdime", "numpy", "pandas", "pytest (>=7.1,<8.0)", "pytest-cov (>=3,<5)", "pytest-param-files (>=0.3.3,<0.4.0)", "pytest-regressions", "sympy (>=1.10.1)"] + +[package.source] +type = "git" +url = "https://github.com/executablebooks/MyST-NB.git" +reference = "3d6a5d1" +resolved_reference = "3d6a5d16d808dd9dfe95f71e5c0b6f08c7c3cc00" + +[[package]] +name = "myst-parser" +version = "2.0.0" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +optional = false +python-versions = ">=3.8" +files = [ + {file = "myst_parser-2.0.0-py3-none-any.whl", hash = "sha256:7c36344ae39c8e740dad7fdabf5aa6fc4897a813083c6cc9990044eb93656b14"}, + {file = "myst_parser-2.0.0.tar.gz", hash = "sha256:ea929a67a6a0b1683cdbe19b8d2e724cd7643f8aa3e7bb18dd65beac3483bead"}, +] + +[package.dependencies] +docutils = ">=0.16,<0.21" +jinja2 = "*" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4,<1.0" +pyyaml = "*" +sphinx = ">=6,<8" + +[package.extras] +code-style = ["pre-commit (>=3.0,<4.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.8.2,<0.9.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"] + +[[package]] +name = "nbclient" +version = "0.7.4" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "nbclient-0.7.4-py3-none-any.whl", hash = "sha256:c817c0768c5ff0d60e468e017613e6eae27b6fa31e43f905addd2d24df60c125"}, + {file = "nbclient-0.7.4.tar.gz", hash = "sha256:d447f0e5a4cfe79d462459aec1b3dc5c2e9152597262be8ee27f7d4c02566a0d"}, +] + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +nbformat = ">=5.1" +traitlets = ">=5.3" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +test = ["flaky", "ipykernel", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbformat" +version = "5.9.2" +description = "The Jupyter Notebook format" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"}, + {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"}, +] + +[package.dependencies] +fastjsonschema = "*" +jsonschema = ">=2.6" +jupyter-core = "*" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + +[[package]] +name = "nest-asyncio" +version = "1.5.7" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, + {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, +] + +[[package]] +name = "nox" +version = "2023.4.22" +description = "Flexible test automation." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "nox-2023.4.22-py3-none-any.whl", hash = "sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891"}, + {file = "nox-2023.4.22.tar.gz", hash = "sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f"}, +] + +[package.dependencies] +argcomplete = ">=1.9.4,<4.0" +colorlog = ">=2.6.1,<7.0.0" +packaging = ">=20.9" +virtualenv = ">=14" + +[package.extras] +tox-to-nox = ["jinja2", "tox (<4)"] + +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "pandas" +version = "2.0.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, + {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, + {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, + {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, + {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, + {file = 
"pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, + {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, + {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, + {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, + {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, + {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] +aws = ["s3fs (>=2021.08.0)"] +clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] +compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] +computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2021.07.0)"] +gcp = 
["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] +hdf5 = ["tables (>=3.6.1)"] +html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] +mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] +spss = ["pyreadstat (>=1.1.2)"] +sql-other = ["SQLAlchemy (>=1.4.16)"] +test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.6.3)"] + +[[package]] +name = "pandas-stubs" +version = "2.0.2.230605" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas_stubs-2.0.2.230605-py3-none-any.whl", hash = "sha256:39106b602f3cb6dc5f728b84e1b32bde6ecf41ee34ee714c66228009609fbada"}, + {file = "pandas_stubs-2.0.2.230605.tar.gz", hash = "sha256:624c7bb06d38145a44b61be459ccd19b038e0bf20364a025ecaab78fea65e858"}, +] + +[package.dependencies] +numpy = ">=1.24.3" +types-pytz = ">=2022.1.1" + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "pep8-naming" +version = "0.13.3" +description = "Check PEP-8 naming conventions, plugin for flake8" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971"}, + {file = "pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80"}, +] + +[package.dependencies] +flake8 = ">=5.0.0" + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + +[[package]] +name = "platformdirs" +version = "3.10.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "plotly" +version = "5.16.1" +description = "An open-source, interactive data visualization library for Python" +optional = true +python-versions = ">=3.6" +files = [ + {file = "plotly-5.16.1-py2.py3-none-any.whl", hash = "sha256:19cc34f339acd4e624177806c14df22f388f23fb70658b03aad959a0e650a0dc"}, + {file = "plotly-5.16.1.tar.gz", hash = "sha256:295ac25edeb18c893abb71dcadcea075b78fd6fdf07cee4217a4e1009667925b"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + +[[package]] +name = "pluggy" +version = "1.2.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.39" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "5.9.5" +description = 
"Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, + {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, + {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, + {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, + {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, + {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, + {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, + {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pyarrow" +version = "12.0.1" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyarrow-12.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6d288029a94a9bb5407ceebdd7110ba398a00412c5b0155ee9813a40d246c5df"}, + {file = "pyarrow-12.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:345e1828efdbd9aa4d4de7d5676778aba384a2c3add896d995b23d368e60e5af"}, + {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d6009fdf8986332b2169314da482baed47ac053311c8934ac6651e614deacd6"}, + {file = "pyarrow-12.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d3c4cbbf81e6dd23fe921bc91dc4619ea3b79bc58ef10bce0f49bdafb103daf"}, + {file = "pyarrow-12.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdacf515ec276709ac8042c7d9bd5be83b4f5f39c6c037a17a60d7ebfd92c890"}, + {file = "pyarrow-12.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:749be7fd2ff260683f9cc739cb862fb11be376de965a2a8ccbf2693b098db6c7"}, + {file = "pyarrow-12.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6895b5fb74289d055c43db3af0de6e16b07586c45763cb5e558d38b86a91e3a7"}, + {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1887bdae17ec3b4c046fcf19951e71b6a619f39fa674f9881216173566c8f718"}, + {file = "pyarrow-12.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c9cb8eeabbadf5fcfc3d1ddea616c7ce893db2ce4dcef0ac13b099ad7ca082"}, + {file = "pyarrow-12.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:ce4aebdf412bd0eeb800d8e47db854f9f9f7e2f5a0220440acf219ddfddd4f63"}, + {file = "pyarrow-12.0.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:e0d8730c7f6e893f6db5d5b86eda42c0a130842d101992b581e2138e4d5663d3"}, + {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43364daec02f69fec89d2315f7fbfbeec956e0d991cbbef471681bd77875c40f"}, + {file = "pyarrow-12.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051f9f5ccf585f12d7de836e50965b3c235542cc896959320d9776ab93f3b33d"}, + {file = "pyarrow-12.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:be2757e9275875d2a9c6e6052ac7957fbbfc7bc7370e4a036a9b893e96fedaba"}, + {file = "pyarrow-12.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:cf812306d66f40f69e684300f7af5111c11f6e0d89d6b733e05a3de44961529d"}, + {file = "pyarrow-12.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:459a1c0ed2d68671188b2118c63bac91eaef6fc150c77ddd8a583e3c795737bf"}, + {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e705e33eaf666bbe508a16fd5ba27ca061e177916b7a317ba5a51bee43384c"}, + {file = "pyarrow-12.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9120c3eb2b1f6f516a3b7a9714ed860882d9ef98c4b17edcdc91d95b7528db60"}, + {file = "pyarrow-12.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:c780f4dc40460015d80fcd6a6140de80b615349ed68ef9adb653fe351778c9b3"}, + {file = "pyarrow-12.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:a3c63124fc26bf5f95f508f5d04e1ece8cc23a8b0af2a1e6ab2b1ec3fdc91b24"}, + {file = "pyarrow-12.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b13329f79fa4472324f8d32dc1b1216616d09bd1e77cfb13104dec5463632c36"}, + {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb656150d3d12ec1396f6dde542db1675a95c0cc8366d507347b0beed96e87ca"}, + {file = "pyarrow-12.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6251e38470da97a5b2e00de5c6a049149f7b2bd62f12fa5dbb9ac674119ba71a"}, + {file = "pyarrow-12.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3de26da901216149ce086920547dfff5cd22818c9eab67ebc41e863a5883bac7"}, + {file = "pyarrow-12.0.1.tar.gz", hash = 
"sha256:cce317fc96e5b71107bf1f9f184d5e54e2bd14bbf3f9a3d62819961f0af86fec"}, +] + +[package.dependencies] +numpy = ">=1.16.6" + +[[package]] +name = "pycodestyle" +version = "2.11.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, + {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydantic" +version = "1.10.12" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, + {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, + {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, + {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, + {file = 
"pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, + {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, + {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, + {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, + {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, + {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pydata-sphinx-theme" +version = "0.13.3" +description = "Bootstrap-based Sphinx theme from the PyData community" +optional = false 
+python-versions = ">=3.7" +files = [ + {file = "pydata_sphinx_theme-0.13.3-py3-none-any.whl", hash = "sha256:bf41ca6c1c6216e929e28834e404bfc90e080b51915bbe7563b5e6fda70354f0"}, + {file = "pydata_sphinx_theme-0.13.3.tar.gz", hash = "sha256:827f16b065c4fd97e847c11c108bf632b7f2ff53a3bca3272f63f3f3ff782ecc"}, +] + +[package.dependencies] +accessible-pygments = "*" +Babel = "*" +beautifulsoup4 = "*" +docutils = "!=0.17.0" +packaging = "*" +pygments = ">=2.7" +sphinx = ">=4.2" +typing-extensions = "*" + +[package.extras] +dev = ["nox", "pre-commit", "pydata-sphinx-theme[doc,test]", "pyyaml"] +doc = ["ablog (>=0.11.0rc2)", "colorama", "ipyleaflet", "jupyter_sphinx", "linkify-it-py", "matplotlib", "myst-nb", "nbsphinx", "numpy", "numpydoc", "pandas", "plotly", "rich", "sphinx-copybutton", "sphinx-design", "sphinx-favicon (>=1.0.1)", "sphinx-sitemap", "sphinx-togglebutton", "sphinxcontrib-youtube", "sphinxext-rediraffe", "xarray"] +test = ["codecov", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "pydocstyle" +version = "6.3.0" +description = "Python docstring style checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, +] + +[package.dependencies] +snowballstemmer = ">=2.2.0" + +[package.extras] +toml = ["tomli (>=1.2.3)"] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pytest" +version = "7.4.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.1" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-gitlab" +version = "3.15.0" +description = "Interact with GitLab API" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "python-gitlab-3.15.0.tar.gz", hash = "sha256:c9e65eb7612a9fbb8abf0339972eca7fd7a73d4da66c9b446ffe528930aff534"}, + {file = "python_gitlab-3.15.0-py3-none-any.whl", hash = "sha256:8f8d1c0d387f642eb1ac7bf5e8e0cd8b3dd49c6f34170cee3c7deb7d384611f3"}, +] + +[package.dependencies] +requests = ">=2.25.0" +requests-toolbelt = ">=0.10.1" + +[package.extras] +autocompletion = ["argcomplete (>=1.10.0,<3)"] +yaml = ["PyYaml (>=5.2)"] + +[[package]] +name = "python-semantic-release" +version = "8.0.7" +description = "Automatic Semantic Versioning for Python projects" +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-semantic-release-8.0.7.tar.gz", hash = "sha256:ba659a99d7f7ff11f7936cb9e5df117466c5161cc9a30d786270538ba0ea3798"}, + {file = "python_semantic_release-8.0.7-py3-none-any.whl", hash = "sha256:e38a1233d04c6568c75dd9802bbd8011d7f7040b042584ed2c1f58e91349fe64"}, +] + +[package.dependencies] +click = ">=8,<9" +dotty-dict = 
">=1.3.0,<2" +gitpython = ">=3.0.8,<4" +importlib-resources = "5.7" +jinja2 = ">=3.1.2,<4" +pydantic = ">=1.10.2,<2" +python-gitlab = ">=2,<4" +requests = ">=2.25,<3" +rich = ">=12.5.1" +shellingham = ">=1.5.0.post1" +tomlkit = ">=0.10,<1.0" + +[package.extras] +dev = ["black", "pre-commit", "ruff (>=0.0.282)", "tox"] +docs = ["Sphinx (<=6.0.0)", "furo (>=2023.3.27)", "sphinx-autobuild (==2021.03.14)", "sphinxcontrib-apidoc (==0.3.0)"] +mypy = ["mypy", "types-requests"] +test = ["coverage[toml] (>=6,<8)", "pytest (>=7,<8)", "pytest-cov (>=4,<5)", "pytest-lazy-fixture (>=0.6.3,<0.7.0)", "pytest-mock (>=3,<4)", "pytest-xdist (>=2,<4)", "requests-mock (>=1.10.0,<2)", "responses (==0.23.3)", "types-pytest-lazy-fixture (>=0.6.3.3)"] + +[[package]] +name = "pytz" +version = "2023.3" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, +] + +[[package]] +name = "pyupgrade" +version = "3.10.1" +description = "A tool to automatically upgrade syntax for newer versions." +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "pyupgrade-3.10.1-py2.py3-none-any.whl", hash = "sha256:f565b4d26daa46ed522e98746834e77e444269103f8bc04413d77dad95169a24"}, + {file = "pyupgrade-3.10.1.tar.gz", hash = "sha256:1d8d138c2ccdd3c42b1419230ae036d5607dc69465a26feacc069642fc8d1b90"}, +] + +[package.dependencies] +tokenize-rt = ">=5.2.0" + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = 
"pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "pyzmq" +version = "25.1.1" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, + {file = 
"pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, + {file = "pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, + {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, + {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = "sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, + {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = 
"sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, + {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, + {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, + {file = "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, + {file = "pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = 
"sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, + {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, + {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = "sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, + {file = "pyzmq-25.1.1.tar.gz", hash = "sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "referencing" +version = "0.30.2" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, + {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "requests" +version = "2.31.0" +description = 
"Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "restructuredtext-lint" +version = "1.4.0" +description = "reStructuredText linter" +optional = false +python-versions = "*" +files = [ + {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, +] + +[package.dependencies] +docutils = ">=0.11,<1.0" + +[[package]] +name = "rich" +version = "13.5.2" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, + {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rpds-py" +version = "0.9.2" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.9.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ab6919a09c055c9b092798ce18c6c4adf49d24d4d9e43a92b257e3f2548231e7"}, + {file = "rpds_py-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d55777a80f78dd09410bd84ff8c95ee05519f41113b2df90a69622f5540c4f8b"}, + {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a216b26e5af0a8e265d4efd65d3bcec5fba6b26909014effe20cd302fd1138fa"}, + {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29cd8bfb2d716366a035913ced99188a79b623a3512292963d84d3e06e63b496"}, + {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44659b1f326214950a8204a248ca6199535e73a694be8d3e0e869f820767f12f"}, + {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:745f5a43fdd7d6d25a53ab1a99979e7f8ea419dfefebcab0a5a1e9095490ee5e"}, + {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a987578ac5214f18b99d1f2a3851cba5b09f4a689818a106c23dbad0dfeb760f"}, + {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:bf4151acb541b6e895354f6ff9ac06995ad9e4175cbc6d30aaed08856558201f"}, + {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03421628f0dc10a4119d714a17f646e2837126a25ac7a256bdf7c3943400f67f"}, + {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13b602dc3e8dff3063734f02dcf05111e887f301fdda74151a93dbbc249930fe"}, + {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fae5cb554b604b3f9e2c608241b5d8d303e410d7dfb6d397c335f983495ce7f6"}, + {file = "rpds_py-0.9.2-cp310-none-win32.whl", hash = "sha256:47c5f58a8e0c2c920cc7783113df2fc4ff12bf3a411d985012f145e9242a2764"}, + {file = "rpds_py-0.9.2-cp310-none-win_amd64.whl", hash = "sha256:4ea6b73c22d8182dff91155af018b11aac9ff7eca085750455c5990cb1cfae6e"}, + {file = "rpds_py-0.9.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e564d2238512c5ef5e9d79338ab77f1cbbda6c2d541ad41b2af445fb200385e3"}, + {file = "rpds_py-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f411330a6376fb50e5b7a3e66894e4a39e60ca2e17dce258d53768fea06a37bd"}, + {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e7521f5af0233e89939ad626b15278c71b69dc1dfccaa7b97bd4cdf96536bb7"}, + {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3335c03100a073883857e91db9f2e0ef8a1cf42dc0369cbb9151c149dbbc1b"}, + {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d25b1c1096ef0447355f7293fbe9ad740f7c47ae032c2884113f8e87660d8f6e"}, + {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a5d3fbd02efd9cf6a8ffc2f17b53a33542f6b154e88dd7b42ef4a4c0700fdad"}, + {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5934e2833afeaf36bd1eadb57256239785f5af0220ed8d21c2896ec4d3a765f"}, + {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:095b460e117685867d45548fbd8598a8d9999227e9061ee7f012d9d264e6048d"}, + {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:91378d9f4151adc223d584489591dbb79f78814c0734a7c3bfa9c9e09978121c"}, + {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:24a81c177379300220e907e9b864107614b144f6c2a15ed5c3450e19cf536fae"}, + {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:de0b6eceb46141984671802d412568d22c6bacc9b230174f9e55fc72ef4f57de"}, + {file = "rpds_py-0.9.2-cp311-none-win32.whl", hash = "sha256:700375326ed641f3d9d32060a91513ad668bcb7e2cffb18415c399acb25de2ab"}, + {file = "rpds_py-0.9.2-cp311-none-win_amd64.whl", hash = "sha256:0766babfcf941db8607bdaf82569ec38107dbb03c7f0b72604a0b346b6eb3298"}, + {file = "rpds_py-0.9.2-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1440c291db3f98a914e1afd9d6541e8fc60b4c3aab1a9008d03da4651e67386"}, + {file = "rpds_py-0.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0f2996fbac8e0b77fd67102becb9229986396e051f33dbceada3debaacc7033f"}, + {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f30d205755566a25f2ae0382944fcae2f350500ae4df4e795efa9e850821d82"}, + {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:159fba751a1e6b1c69244e23ba6c28f879a8758a3e992ed056d86d74a194a0f3"}, + {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a1f044792e1adcea82468a72310c66a7f08728d72a244730d14880cd1dabe36b"}, + {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9251eb8aa82e6cf88510530b29eef4fac825a2b709baf5b94a6094894f252387"}, + {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01899794b654e616c8625b194ddd1e5b51ef5b60ed61baa7a2d9c2ad7b2a4238"}, + {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0c43f8ae8f6be1d605b0465671124aa8d6a0e40f1fb81dcea28b7e3d87ca1e1"}, + {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:207f57c402d1f8712618f737356e4b6f35253b6d20a324d9a47cb9f38ee43a6b"}, + {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b52e7c5ae35b00566d244ffefba0f46bb6bec749a50412acf42b1c3f402e2c90"}, + {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:978fa96dbb005d599ec4fd9ed301b1cc45f1a8f7982d4793faf20b404b56677d"}, + {file = "rpds_py-0.9.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6aa8326a4a608e1c28da191edd7c924dff445251b94653988efb059b16577a4d"}, + {file = "rpds_py-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aad51239bee6bff6823bbbdc8ad85136c6125542bbc609e035ab98ca1e32a192"}, + {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd4dc3602370679c2dfb818d9c97b1137d4dd412230cfecd3c66a1bf388a196"}, + {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd9da77c6ec1f258387957b754f0df60766ac23ed698b61941ba9acccd3284d1"}, + {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:190ca6f55042ea4649ed19c9093a9be9d63cd8a97880106747d7147f88a49d18"}, + {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:876bf9ed62323bc7dcfc261dbc5572c996ef26fe6406b0ff985cbcf460fc8a4c"}, + {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa2818759aba55df50592ecbc95ebcdc99917fa7b55cc6796235b04193eb3c55"}, + {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ea4d00850ef1e917815e59b078ecb338f6a8efda23369677c54a5825dbebb55"}, + {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5855c85eb8b8a968a74dc7fb014c9166a05e7e7a8377fb91d78512900aadd13d"}, + {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:14c408e9d1a80dcb45c05a5149e5961aadb912fff42ca1dd9b68c0044904eb32"}, + {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:65a0583c43d9f22cb2130c7b110e695fff834fd5e832a776a107197e59a1898e"}, + {file = "rpds_py-0.9.2-cp38-none-win32.whl", hash = "sha256:71f2f7715935a61fa3e4ae91d91b67e571aeb5cb5d10331ab681256bda2ad920"}, + {file = "rpds_py-0.9.2-cp38-none-win_amd64.whl", hash = "sha256:674c704605092e3ebbbd13687b09c9f78c362a4bc710343efe37a91457123044"}, + {file = "rpds_py-0.9.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:07e2c54bef6838fa44c48dfbc8234e8e2466d851124b551fc4e07a1cfeb37260"}, + {file = "rpds_py-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7fdf55283ad38c33e35e2855565361f4bf0abd02470b8ab28d499c663bc5d7c"}, + {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:890ba852c16ace6ed9f90e8670f2c1c178d96510a21b06d2fa12d8783a905193"}, + {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:50025635ba8b629a86d9d5474e650da304cb46bbb4d18690532dd79341467846"}, + {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517cbf6e67ae3623c5127206489d69eb2bdb27239a3c3cc559350ef52a3bbf0b"}, + {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0836d71ca19071090d524739420a61580f3f894618d10b666cf3d9a1688355b1"}, + {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c439fd54b2b9053717cca3de9583be6584b384d88d045f97d409f0ca867d80f"}, + {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f68996a3b3dc9335037f82754f9cdbe3a95db42bde571d8c3be26cc6245f2324"}, + {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7d68dc8acded354c972116f59b5eb2e5864432948e098c19fe6994926d8e15c3"}, + {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f963c6b1218b96db85fc37a9f0851eaf8b9040aa46dec112611697a7023da535"}, + {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a46859d7f947061b4010e554ccd1791467d1b1759f2dc2ec9055fa239f1bc26"}, + {file = "rpds_py-0.9.2-cp39-none-win32.whl", hash = "sha256:e07e5dbf8a83c66783a9fe2d4566968ea8c161199680e8ad38d53e075df5f0d0"}, + {file = "rpds_py-0.9.2-cp39-none-win_amd64.whl", hash = "sha256:682726178138ea45a0766907957b60f3a1bf3acdf212436be9733f28b6c5af3c"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:196cb208825a8b9c8fc360dc0f87993b8b260038615230242bf18ec84447c08d"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c7671d45530fcb6d5e22fd40c97e1e1e01965fc298cbda523bb640f3d923b387"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83b32f0940adec65099f3b1c215ef7f1d025d13ff947975a055989cb7fd019a4"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f67da97f5b9eac838b6980fc6da268622e91f8960e083a34533ca710bec8611"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03975db5f103997904c37e804e5f340c8fdabbb5883f26ee50a255d664eed58c"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:987b06d1cdb28f88a42e4fb8a87f094e43f3c435ed8e486533aea0bf2e53d931"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c861a7e4aef15ff91233751619ce3a3d2b9e5877e0fcd76f9ea4f6847183aa16"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02938432352359805b6da099c9c95c8a0547fe4b274ce8f1a91677401bb9a45f"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ef1f08f2a924837e112cba2953e15aacfccbbfcd773b4b9b4723f8f2ddded08e"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:35da5cc5cb37c04c4ee03128ad59b8c3941a1e5cd398d78c37f716f32a9b7f67"}, + {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:141acb9d4ccc04e704e5992d35472f78c35af047fa0cfae2923835d153f091be"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79f594919d2c1a0cc17d1988a6adaf9a2f000d2e1048f71f298b056b1018e872"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a06418fe1155e72e16dddc68bb3780ae44cebb2912fbd8bb6ff9161de56e1798"}, + {file 
= "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2eb034c94b0b96d5eddb290b7b5198460e2d5d0c421751713953a9c4e47d10"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b08605d248b974eb02f40bdcd1a35d3924c83a2a5e8f5d0fa5af852c4d960af"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0805911caedfe2736935250be5008b261f10a729a303f676d3d5fea6900c96a"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab2299e3f92aa5417d5e16bb45bb4586171c1327568f638e8453c9f8d9e0f020"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c8d7594e38cf98d8a7df25b440f684b510cf4627fe038c297a87496d10a174f"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b9ec12ad5f0a4625db34db7e0005be2632c1013b253a4a60e8302ad4d462afd"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1fcdee18fea97238ed17ab6478c66b2095e4ae7177e35fb71fbe561a27adf620"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:933a7d5cd4b84f959aedeb84f2030f0a01d63ae6cf256629af3081cf3e3426e8"}, + {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:686ba516e02db6d6f8c279d1641f7067ebb5dc58b1d0536c4aaebb7bf01cdc5d"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0173c0444bec0a3d7d848eaeca2d8bd32a1b43f3d3fde6617aac3731fa4be05f"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d576c3ef8c7b2d560e301eb33891d1944d965a4d7a2eacb6332eee8a71827db6"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed89861ee8c8c47d6beb742a602f912b1bb64f598b1e2f3d758948721d44d468"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1054a08e818f8e18910f1bee731583fe8f899b0a0a5044c6e680ceea34f93876"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99e7c4bb27ff1aab90dcc3e9d37ee5af0231ed98d99cb6f5250de28889a3d502"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c545d9d14d47be716495076b659db179206e3fd997769bc01e2d550eeb685596"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9039a11bca3c41be5a58282ed81ae422fa680409022b996032a43badef2a3752"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fb39aca7a64ad0c9490adfa719dbeeb87d13be137ca189d2564e596f8ba32c07"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2d8b3b3a2ce0eaa00c5bbbb60b6713e94e7e0becab7b3db6c5c77f979e8ed1f1"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:99b1c16f732b3a9971406fbfe18468592c5a3529585a45a35adbc1389a529a03"}, + {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c27ee01a6c3223025f4badd533bea5e87c988cb0ba2811b690395dfe16088cfe"}, + {file = "rpds_py-0.9.2.tar.gz", hash = "sha256:8d70e8f14900f2657c249ea4def963bed86a29b81f81f5b76b5a9215680de945"}, +] + +[[package]] +name = "ruamel-yaml" +version = "0.17.32" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of 
comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3" +files = [ + {file = "ruamel.yaml-0.17.32-py3-none-any.whl", hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"}, + {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""} + +[package.extras] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.7" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.5" +files = [ + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, + {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763"}, + {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-win32.whl", hash = "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e"}, + {file = 
"ruamel.yaml.clib-0.2.7-cp36-cp36m-win_amd64.whl", hash = "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win32.whl", hash = "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93"}, + {file = "ruamel.yaml.clib-0.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win32.whl", hash = "sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac"}, + {file = "ruamel.yaml.clib-0.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win32.whl", hash = "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8"}, + {file = "ruamel.yaml.clib-0.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5"}, + {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, +] + +[[package]] +name = "safety" +version = "2.3.4" +description = "Checks installed dependencies for known vulnerabilities and licenses." 
+optional = false +python-versions = "*" +files = [ + {file = "safety-2.3.4-py3-none-any.whl", hash = "sha256:6224dcd9b20986a2b2c5e7acfdfba6bca42bb11b2783b24ed04f32317e5167ea"}, + {file = "safety-2.3.4.tar.gz", hash = "sha256:b9e74e794e82f54d11f4091c5d820c4d2d81de9f953bf0b4f33ac8bc402ae72c"}, +] + +[package.dependencies] +Click = ">=8.0.2" +dparse = ">=0.6.2" +packaging = ">=21.0" +requests = "*" +"ruamel.yaml" = ">=0.17.21" +setuptools = ">=19.3" + +[package.extras] +github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] +gitlab = ["python-gitlab (>=1.3.0)"] + +[[package]] +name = "setuptools" +version = "68.1.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, + {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shellingham" +version = "1.5.3" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.3-py2.py3-none-any.whl", hash = "sha256:419c6a164770c9c7cfcaeddfacb3d31ac7a8db0b0f3e9c1287679359734107e9"}, + {file = "shellingham-1.5.3.tar.gz", hash = "sha256:cb4a6fec583535bc6da17b647dd2330cf7ef30239e05d547d99ae3705fd0f7f8"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.6" +files = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "soupsieve" +version = "2.4.1" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, + {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, +] + +[[package]] +name = "sphinx" +version = "6.2.1" +description = "Python documentation generator" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Sphinx-6.2.1.tar.gz", hash = "sha256:6d56a34697bb749ffa0152feafc4b19836c755d90a7c59b72bc7dfd371b9cc6b"}, + {file = "sphinx-6.2.1-py3-none-any.whl", hash = "sha256:97787ff1fa3256a3eef9eda523a63dbf299f7b47e053cfcf684a1c2a8380c912"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.13" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] + +[[package]] +name = "sphinx-autobuild" +version = "2021.3.14" +description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, + {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, +] + +[package.dependencies] +colorama = "*" +livereload = "*" +sphinx = "*" + +[package.extras] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "sphinx-book-theme" +version = "1.0.1" +description = "A clean book theme for scientific explanations and documentation with Sphinx" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx_book_theme-1.0.1-py3-none-any.whl", hash = "sha256:d15f8248b3718a9a6be0ba617a32d1591f9fa39c614469bface777ba06a73b75"}, + {file = "sphinx_book_theme-1.0.1.tar.gz", hash = "sha256:927b399a6906be067e49c11ef1a87472f1b1964075c9eea30fb82c64b20aedee"}, +] + +[package.dependencies] +pydata-sphinx-theme = ">=0.13.3" +sphinx = ">=4,<7" + +[package.extras] +code-style = ["pre-commit"] +doc = ["ablog", "docutils (==0.17.1)", "folium", "ipywidgets", "matplotlib", "myst-nb", "nbclient", "numpy", "numpydoc", "pandas", "plotly", "sphinx-copybutton", "sphinx-design", "sphinx-examples", "sphinx-tabs (<=3.4.0)", "sphinx-thebe", "sphinx-togglebutton", "sphinxcontrib-bibtex", "sphinxcontrib-youtube", "sphinxext-opengraph"] +test = ["beautifulsoup4", "coverage", "myst-nb", "pytest", "pytest-cov", "pytest-regressions", "sphinx_thebe"] + +[[package]] +name = "sphinx-copybutton" +version = "0.5.2" +description = "Add a copy button to each of your code cells." +optional = false +python-versions = ">=3.7" +files = [ + {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, + {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, +] + +[package.dependencies] +sphinx = ">=1.8" + +[package.extras] +code-style = ["pre-commit (==2.12.1)"] +rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] + +[[package]] +name = "sphinx-design" +version = "0.5.0" +description = "A sphinx extension for designing beautiful, view size responsive web components." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx_design-0.5.0-py3-none-any.whl", hash = "sha256:1af1267b4cea2eedd6724614f19dcc88fe2e15aff65d06b2f6252cee9c4f4c1e"}, + {file = "sphinx_design-0.5.0.tar.gz", hash = "sha256:e8e513acea6f92d15c6de3b34e954458f245b8e761b45b63950f65373352ab00"}, +] + +[package.dependencies] +sphinx = ">=5,<8" + +[package.extras] +code-style = ["pre-commit (>=3,<4)"] +rtd = ["myst-parser (>=1,<3)"] +testing = ["myst-parser (>=1,<3)", "pytest (>=7.1,<8.0)", "pytest-cov", "pytest-regressions"] +theme-furo = ["furo (>=2023.7.0,<2023.8.0)"] +theme-pydata = ["pydata-sphinx-theme (>=0.13.0,<0.14.0)"] +theme-rtd = ["sphinx-rtd-theme (>=1.0,<2.0)"] +theme-sbt = ["sphinx-book-theme (>=1.0,<2.0)"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sqlalchemy" +version = "2.0.20" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759b51346aa388c2e606ee206c0bc6f15a5299f6174d1e10cadbe4530d3c7a98"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1506e988ebeaaf316f183da601f24eedd7452e163010ea63dbe52dc91c7fc70e"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5768c268df78bacbde166b48be788b83dddaa2a5974b8810af422ddfe68a9bc8"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f0dd6d15b6dc8b28a838a5c48ced7455c3e1fb47b89da9c79cc2090b072a50"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:243d0fb261f80a26774829bc2cee71df3222587ac789b7eaf6555c5b15651eed"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb6d77c31e1bf4268b4d61b549c341cbff9842f8e115ba6904249c20cb78a61"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-win32.whl", hash = "sha256:bcb04441f370cbe6e37c2b8d79e4af9e4789f626c595899d94abebe8b38f9a4d"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-win_amd64.whl", hash = "sha256:d32b5ffef6c5bcb452723a496bad2d4c52b346240c59b3e6dba279f6dcc06c14"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd81466bdbc82b060c3c110b2937ab65ace41dfa7b18681fdfad2f37f27acdd7"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fe7d61dc71119e21ddb0094ee994418c12f68c61b3d263ebaae50ea8399c4d4"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4e571af672e1bb710b3cc1a9794b55bce1eae5aed41a608c0401885e3491179"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3364b7066b3c7f4437dd345d47271f1251e0cfb0aba67e785343cdbdb0fff08c"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1be86ccea0c965a1e8cd6ccf6884b924c319fcc85765f16c69f1ae7148eba64b"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1d35d49a972649b5080557c603110620a86aa11db350d7a7cb0f0a3f611948a0"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-win32.whl", hash = "sha256:27d554ef5d12501898d88d255c54eef8414576f34672e02fe96d75908993cf53"}, + {file = 
"SQLAlchemy-2.0.20-cp311-cp311-win_amd64.whl", hash = "sha256:411e7f140200c02c4b953b3dbd08351c9f9818d2bd591b56d0fa0716bd014f1e"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3c6aceebbc47db04f2d779db03afeaa2c73ea3f8dcd3987eb9efdb987ffa09a3"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d3f175410a6db0ad96b10bfbb0a5530ecd4fcf1e2b5d83d968dd64791f810ed"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8186be85da6587456c9ddc7bf480ebad1a0e6dcbad3967c4821233a4d4df57"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c3d99ba99007dab8233f635c32b5cd24fb1df8d64e17bc7df136cedbea427897"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:76fdfc0f6f5341987474ff48e7a66c3cd2b8a71ddda01fa82fedb180b961630a"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-win32.whl", hash = "sha256:d3793dcf5bc4d74ae1e9db15121250c2da476e1af8e45a1d9a52b1513a393459"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-win_amd64.whl", hash = "sha256:79fde625a0a55220d3624e64101ed68a059c1c1f126c74f08a42097a72ff66a9"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:599ccd23a7146e126be1c7632d1d47847fa9f333104d03325c4e15440fc7d927"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1a58052b5a93425f656675673ef1f7e005a3b72e3f2c91b8acca1b27ccadf5f4"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79543f945be7a5ada9943d555cf9b1531cfea49241809dd1183701f94a748624"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63e73da7fb030ae0a46a9ffbeef7e892f5def4baf8064786d040d45c1d6d1dc5"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ce5e81b800a8afc870bb8e0a275d81957e16f8c4b62415a7b386f29a0cb9763"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb0d3e94c2a84215532d9bcf10229476ffd3b08f481c53754113b794afb62d14"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-win32.whl", hash = "sha256:8dd77fd6648b677d7742d2c3cc105a66e2681cc5e5fb247b88c7a7b78351cf74"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-win_amd64.whl", hash = "sha256:6f8a934f9dfdf762c844e5164046a9cea25fabbc9ec865c023fe7f300f11ca4a"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:26a3399eaf65e9ab2690c07bd5cf898b639e76903e0abad096cd609233ce5208"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4cde2e1096cbb3e62002efdb7050113aa5f01718035ba9f29f9d89c3758e7e4e"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b09ba72e4e6d341bb5bdd3564f1cea6095d4c3632e45dc69375a1dbe4e26ec"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b74eeafaa11372627ce94e4dc88a6751b2b4d263015b3523e2b1e57291102f0"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:77d37c1b4e64c926fa3de23e8244b964aab92963d0f74d98cbc0783a9e04f501"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eefebcc5c555803065128401a1e224a64607259b5eb907021bf9b175f315d2a6"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-win32.whl", hash = "sha256:3423dc2a3b94125094897118b52bdf4d37daf142cbcf26d48af284b763ab90e9"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-win_amd64.whl", hash = 
"sha256:5ed61e3463021763b853628aef8bc5d469fe12d95f82c74ef605049d810f3267"}, + {file = "SQLAlchemy-2.0.20-py3-none-any.whl", hash = "sha256:63a368231c53c93e2b67d0c5556a9836fdcd383f7e3026a39602aad775b14acf"}, + {file = "SQLAlchemy-2.0.20.tar.gz", hash = "sha256:ca8a5ff2aa7f3ade6c498aaafce25b1eaeabe4e42b73e25519183e4566a16fc6"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} +typing-extensions = ">=4.2.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "stack-data" +version = "0.6.2" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, + {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "tokenize-rt" +version = "5.2.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tokenize_rt-5.2.0-py2.py3-none-any.whl", hash = "sha256:b79d41a65cfec71285433511b50271b05da3584a1da144a0752e9c621a285289"}, + {file = "tokenize_rt-5.2.0.tar.gz", hash = "sha256:9fe80f8a5c1edad2d3ede0f37481cc0cc1538a2f442c9c2f9e4feacd2792d054"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tomlkit" +version = "0.12.1" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, + {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, +] + +[[package]] +name = "tornado" +version = "6.3.3" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, + {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, + {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, + {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, +] + +[[package]] +name = "traitlets" +version = "5.9.0" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.7" +files = [ + {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, + {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=2.0)", 
"pre-commit", "pytest", "pytest-mock"] + +[[package]] +name = "typeguard" +version = "4.1.2" +description = "Run-time type checker for Python" +optional = false +python-versions = ">=3.7.4" +files = [ + {file = "typeguard-4.1.2-py3-none-any.whl", hash = "sha256:e00775920d4c91e93a0db0ed473ecda9cfaca578aed3ce0ed3ba7f3cc38eab9c"}, + {file = "typeguard-4.1.2.tar.gz", hash = "sha256:3be187945f9ef5a9f6d7a926dfe54babb7dfd807085ce05f9a5e8735f2487990"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.7.0", markers = "python_version < \"3.12\""} + +[package.extras] +doc = ["Sphinx (<7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["mypy (>=1.2.0)", "pytest (>=7)"] + +[[package]] +name = "types-pytz" +version = "2023.3.0.1" +description = "Typing stubs for pytz" +optional = false +python-versions = "*" +files = [ + {file = "types-pytz-2023.3.0.1.tar.gz", hash = "sha256:1a7b8d4aac70981cfa24478a41eadfcd96a087c986d6f150d77e3ceb3c2bdfab"}, + {file = "types_pytz-2023.3.0.1-py3-none-any.whl", hash = "sha256:65152e872137926bb67a8fe6cc9cfd794365df86650c5d5fdc7b167b0f38892e"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + +[[package]] +name = "urllib3" +version = "2.0.4" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, + {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "virtualenv" +version = "20.24.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.24.3-py3-none-any.whl", hash = "sha256:95a6e9398b4967fbcb5fef2acec5efaf9aa4972049d9ae41f95e0972a683fd02"}, + {file = "virtualenv-20.24.3.tar.gz", hash = "sha256:e5c3b4ce817b0b328af041506a2a299418c98747c4b1e68cb7527e74ced23efc"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<4" + +[package.extras] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] + +[[package]] +name = "xdoctest" +version = "1.1.1" +description = "A rewrite of the builtin doctest module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "xdoctest-1.1.1-py3-none-any.whl", hash = "sha256:d59d4ed91cb92e4430ef0ad1b134a2bef02adff7d2fb9c9f057547bee44081a2"}, + {file = "xdoctest-1.1.1.tar.gz", hash = "sha256:2eac8131bdcdf2781b4e5a62d6de87f044b730cc8db8af142a51bb29c245e779"}, +] + +[package.dependencies] +colorama = {version = "*", optional = true, markers = "platform_system == \"Windows\" and extra == \"colors\""} +Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0\" and extra == \"colors\""} +six = "*" + +[package.extras] +all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "pytest", "pytest", "pytest", "pytest-cov", "six", "tomli", "typing"] +all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", 
"nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "six (==1.11.0)", "tomli (==0.2.0)", "typing (==3.7.4)"] +colors = ["Pygments", "Pygments", "colorama"] +jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"] +optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"] +optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] +runtime-strict = ["six (==1.11.0)"] +tests = ["codecov", "pytest", "pytest", "pytest", "pytest-cov", "typing"] +tests-binary = ["cmake", "cmake", "ninja", "ninja", "pybind11", "pybind11", "scikit-build", "scikit-build"] +tests-binary-strict = ["cmake (==3.21.2)", "cmake (==3.25.0)", "ninja (==1.10.2)", "ninja (==1.11.1)", "pybind11 (==2.10.3)", "pybind11 (==2.7.1)", "scikit-build (==0.11.1)", "scikit-build (==0.16.1)"] +tests-strict = ["codecov (==2.0.15)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==3.0.0)", "typing (==3.7.4)"] + +[[package]] +name = "zipp" +version = "3.16.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, + {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[extras] +plot = ["plotly"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.8.1,<4.0" +content-hash = "ea3a81f3ec5a9e34cfa001d87d8adb3d2a5f180b3ba68f785ace5dd1f700393a" diff --git a/python/pyproject.toml b/python/pyproject.toml new file mode 100644 index 000000000..58d7635b5 --- /dev/null +++ b/python/pyproject.toml @@ -0,0 +1,155 @@ +[project] +name = "kaskada" +version = "0.6.0-a.1" +description = "Kaskada query builder and local execution engine." +requires-python = ">=3.8.1,<4.0" +classifiers = [ + "Development Status :: 3 - Alpha", +] +# We need to list dependencies here for maturin to put them in the package. +# They should match what poetry believes we need. 
+dependencies = [
+    "pandas >= 2.0.0, < 3.0.0",
+    "pyarrow >= 12.0.0",
+    "typing-extensions >= 4.6.0",
+]
+
+[project.optional-dependencies]
+plot = [
+    "plotly >= 5.0.0, < 6.0.0",
+]
+
+[tool.poetry]
+name = "kaskada"
+description = "Kaskada query builder and local execution engine."
+authors = []
+version = "0.6.0-a.1"
+
+[tool.poetry.dependencies]
+# Dependencies to install for using the project.
+# These are used when we do `poetry install`.
+# They should be reflected in `dependencies` above.
+pandas = "^2.0.3"
+python = ">=3.8.1,<4.0"
+pyarrow = "^12.0.1"
+typing-extensions = "^4.7.1"
+plotly = {version = "^5.16.1", optional = true}
+
+[tool.poetry.extras]
+plot = ["plotly"]
+
+[tool.poetry.group.dev.dependencies]
+# Dependencies for building and developing.
+maturin = "^1.1.0"
+nox = "2023.4.22"
+
+[tool.poetry.group.lint]
+# Dependencies for linting.
+optional = true
+
+[tool.poetry.group.lint.dependencies]
+black = ">=21.10b0"
+darglint = ">=1.8.1"
+flake8 = ">=4.0.1"
+flake8-bugbear = ">=21.9.2"
+flake8-rst-docstrings = ">=0.2.5"
+isort = ">=5.10.1"
+pep8-naming = ">=0.12.1"
+pydocstyle = "^6.3.0"
+pyupgrade = ">=2.29.1"
+autoflake = "^2.2.0"
+
+[tool.poetry.group.safety]
+optional = true
+
+[tool.poetry.group.safety.dependencies]
+safety = ">=1.10.3"
+
+[tool.poetry.group.typecheck]
+# Dependencies for typechecking (mypy, etc.)
+optional = true
+
+[tool.poetry.group.typecheck.dependencies]
+mypy = ">=0.930"
+pandas-stubs = "^2.0.2"
+typeguard = ">=2.13.3"
+
+[tool.poetry.group.docs]
+# Dependencies for documentation.
+optional = true
+
+[tool.poetry.group.docs.dependencies]
+sphinx = ">=6.0.0"
+sphinx-autobuild = ">=2021.3.14"
+sphinx-book-theme = "^1.0.1"
+sphinx-copybutton = "^0.5.2"
+sphinx-design = "^0.5.0"
+myst-parser = {version = ">=0.16.1"}
+# Use myst-nb from git since the currently released version (0.17.2) pins
+# Sphinx to < 6. Once a new release occurs we can upgrade to `0.18.0` or newer.
+# https://github.com/executablebooks/MyST-NB/issues/530
+myst-nb = { git = "https://github.com/executablebooks/MyST-NB.git", rev = "3d6a5d1"}
+plotly = {version = "^5.16.1"}
+
+[tool.poetry.group.test]
+# Dependencies for testing
+optional = true
+
+[tool.poetry.group.test.dependencies]
+coverage = { extras = ["toml"], version = ">=6.2"}
+pytest = ">=6.2.5"
+pytest-asyncio = "^0.21.1"
+xdoctest = {extras = ["colors"], version = ">=0.15.10"}
+
+[tool.poetry.group.release]
+# Dependencies for performing the release.
+optional = true
+
+[tool.poetry.group.release.dependencies]
+tomlkit = "^0.12.1"
+
+[build-system]
+requires = ["maturin>=1,<2"]
+build-backend = "maturin"
+
+[tool.maturin]
+profile = "release"
+# Path to the python source directory
+python-source = "pysrc"
+# Name of the Rust module in Python
+module-name = "kaskada._ffi"
+
+[tool.maturin.target.x86_64-apple-darwin]
+macos-deployment-target = "11.0"
+[tool.maturin.target.aarch64-apple-darwin]
+macos-deployment-target = "11.0"
+
+[tool.coverage.paths]
+source = ["pysrc", "*/site-packages"]
+tests = ["pytests"]
+
+[tool.coverage.run]
+branch = true
+source = ["pysrc", "pytests"]
+
+[tool.coverage.report]
+show_missing = true
+fail_under = 100
+
+[tool.isort]
+profile = "black"
+force_single_line = true
+lines_after_imports = 2
+
+[tool.mypy]
+strict = true
+warn_unreachable = true
+pretty = true
+show_column_numbers = true
+show_error_context = true
+
+# pyproject.toml
+[tool.pytest.ini_options]
+testpaths = [
+    "pytests",
+]
\ No newline at end of file
diff --git a/python/pysrc/kaskada/__init__.py b/python/pysrc/kaskada/__init__.py
new file mode 100644
index 000000000..4534b480e
--- /dev/null
+++ b/python/pysrc/kaskada/__init__.py
@@ -0,0 +1,25 @@
+"""Kaskada query builder and local execution engine."""
+from __future__ import annotations
+
+from . import plot
+from . import sources
+from . import windows
+from ._execution import ExecutionOptions
+from ._result import Result
+from ._session import init_session
+from ._timestream import Literal
+from ._timestream import Timestream
+from ._timestream import record
+
+
+__all__ = [
+    "ExecutionOptions",
+    "init_session",
+    "Literal",
+    "plot",
+    "record",
+    "Result",
+    "sources",
+    "Timestream",
+    "windows",
+]
diff --git a/python/pysrc/kaskada/_execution.py b/python/pysrc/kaskada/_execution.py
new file mode 100644
index 000000000..4829264dd
--- /dev/null
+++ b/python/pysrc/kaskada/_execution.py
@@ -0,0 +1,25 @@
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass
+class ExecutionOptions:
+    """Execution options for a query.
+
+    Attributes
+    ----------
+    row_limit : Optional[int]
+        The maximum number of rows to return.
+        If not specified, all rows are returned.
+
+    max_batch_size : Optional[int]
+        The maximum batch size to use when returning results.
+        If not specified, the default batch size will be used.
+
+    materialize : bool
+        If true, the query will be a continuous materialization.
+    """
+
+    row_limit: Optional[int] = None
+    max_batch_size: Optional[int] = None
+    materialize: bool = False
diff --git a/python/pysrc/kaskada/_ffi.pyi b/python/pysrc/kaskada/_ffi.pyi
new file mode 100644
index 000000000..f34f7f254
--- /dev/null
+++ b/python/pysrc/kaskada/_ffi.pyi
@@ -0,0 +1,54 @@
+from typing import List
+from typing import Optional
+from typing import Sequence
+
+import pyarrow as pa
+
+from ._execution import ExecutionOptions
+from .udf import Udf
+
+class Session:
+    def __init__(self) -> None: ...
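+
+# Rough usage sketch of these FFI handles (illustrative only; typical code
+# goes through the typed wrappers in `kaskada` rather than this module):
+#
+#   session = Session()
+#   execution = expr.execute(ExecutionOptions(row_limit=10))  # expr: Expr
+#   batches = execution.collect_pyarrow()  # blocking; List[pa.RecordBatch]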
+
+class Execution(object):
+    def collect_pyarrow(self) -> List[pa.RecordBatch]: ...
+    def next_pyarrow(self) -> Optional[pa.RecordBatch]: ...
+    def stop(self) -> None: ...
+    async def next_pyarrow_async(self) -> Optional[pa.RecordBatch]: ...
+
+class Expr:
+    @staticmethod
+    def call(
+        session: Session,
+        operation: str,
+        args: Sequence[Expr],
+    ) -> Expr: ...
+    @staticmethod
+    def literal(
+        session: Session,
+        value: int | float | str | None,
+    ) -> Expr: ...
+    def cast(self, data_type: pa.DataType) -> Expr: ...
+    def data_type(self) -> pa.DataType: ...
+    def is_continuous(self) -> bool: ...
+    def session(self) -> Session: ...
+    def execute(self, options: Optional[ExecutionOptions] = None) -> Execution: ...
+    def grouping(self) -> Optional[str]: ...
+
+def call_udf(udf: Udf, result_type: pa.DataType, *args: pa.Array) -> pa.Array: ...
+
+class Table(Expr):
+    def __init__(
+        self,
+        session: Session,
+        name: str,
+        time_column_name: str,
+        key_column_name: str,
+        schema: pa.Schema,
+        subsort_column_name: Optional[str],
+        grouping_name: Optional[str],
+        time_unit: Optional[str],
+    ) -> None: ...
+    @property
+    def name(self) -> str: ...
+    def add_pyarrow(self, data: pa.RecordBatch) -> None: ...
diff --git a/python/pysrc/kaskada/_result.py b/python/pysrc/kaskada/_result.py
new file mode 100644
index 000000000..eea4e04ac
--- /dev/null
+++ b/python/pysrc/kaskada/_result.py
@@ -0,0 +1,149 @@
+from __future__ import annotations
+
+from typing import AsyncIterator
+from typing import Iterator
+
+import pandas as pd
+import pyarrow as pa
+
+from . import _ffi
+
+
+class Result(object):
+    """Result of running a timestream query."""
+
+    def __init__(self, ffi_execution: _ffi.Execution) -> None:
+        """Create a result object for the given FFI execution."""
+        self._ffi_execution = ffi_execution
+
+    def to_pandas(self) -> pd.DataFrame:
+        """
+        Convert the result to a Pandas DataFrame.
+
+        Returns
+        -------
+        pd.DataFrame
+            The result as a Pandas DataFrame.
+
+        Warnings
+        --------
+        This method will block on the complete results of the query and collect
+        all results into memory. If this is not desired, use `iter_pandas` instead.
+        """
+        return self.to_pyarrow().to_pandas()
+
+    def to_pyarrow(self) -> pa.Table:
+        """
+        Convert the result to a PyArrow Table.
+
+        Returns
+        -------
+        pa.Table
+            The result as a PyArrow Table.
+
+        Warnings
+        --------
+        This method will block on the complete results of the query and collect
+        all results into memory. If this is not desired, use `iter_pyarrow` instead.
+        """
+        batches = self._ffi_execution.collect_pyarrow()
+        if len(batches) == 0:
+            return pa.Table.from_batches([], schema=pa.schema([]))
+
+        table = pa.Table.from_batches(batches)
+        table = table.drop_columns(["_subsort", "_key_hash"])
+        return table
+
+    def iter_pyarrow(self) -> Iterator[pa.RecordBatch]:
+        """
+        Iterate over the results as PyArrow RecordBatches.
+
+        Yields
+        ------
+        pa.RecordBatch
+            The next RecordBatch.
+        """
+        next_batch = self._ffi_execution.next_pyarrow()
+        while next_batch is not None:
+            # Annoyingly, PyArrow doesn't support `drop_columns` on batches.
+            # So we need to convert to a Table and back.
+            table = pa.Table.from_batches([next_batch])
+            table = table.drop_columns(["_subsort", "_key_hash"])
+            for batch in table.to_batches():
+                yield batch
+
+            next_batch = self._ffi_execution.next_pyarrow()
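+
+    # Consumption sketch using only names from this module (illustrative):
+    #
+    #   result = Result(ffi_expr.execute(None))  # ffi_expr: _ffi.Expr
+    #   for batch in result.iter_pyarrow():
+    #       ...  # each yielded batch has _subsort/_key_hash already dropped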
+ """ + for batch in self.iter_pyarrow(): + yield batch.to_pandas() + + def iter_rows(self) -> Iterator[dict]: + """ + Iterate over the results as row dictionaries. + + Yields + ------ + dict + The next row as a dictionary. + """ + for batch in self.iter_pyarrow(): + for row in batch.to_pylist(): + yield row + + async def iter_pyarrow_async(self) -> AsyncIterator[pa.RecordBatch]: + """ + Asynchronously iterate over the results as PyArrow RecordBatches. + + Yields + ------ + pa.RecordBatch + The next RecordBatch. + """ + next_batch = await self._ffi_execution.next_pyarrow_async() + while next_batch is not None: + # Annoyingly, PyArrow doesn't suport `drop_columns` on batches. + # So we need to convert to a Table and back. + table = pa.Table.from_batches([next_batch]) + table = table.drop_columns(["_subsort", "_key_hash"]) + for batch in table.to_batches(): + yield batch + + next_batch = await self._ffi_execution.next_pyarrow_async() + + async def iter_pandas_async(self) -> AsyncIterator[pd.DataFrame]: + """ + Asynchronously iterate over the results as Pandas DataFrames. + + Yields + ------ + pd.DataFrame + The next Pandas DataFrame. + """ + async for batch in self.iter_pyarrow_async(): + yield batch.to_pandas() + + async def iter_rows_async(self) -> AsyncIterator[dict]: + """ + Asycnchronously iterate over the results as row dictionaries. + + Yields + ------ + dict + The next row as a dictionary. + """ + async for batch in self.iter_pyarrow_async(): + for row in batch.to_pylist(): + yield row + + def stop(self) -> None: + """Stop the underlying execution.""" + self._ffi_execution.stop() diff --git a/python/pysrc/kaskada/_session.py b/python/pysrc/kaskada/_session.py new file mode 100644 index 000000000..8cfa0b970 --- /dev/null +++ b/python/pysrc/kaskada/_session.py @@ -0,0 +1,45 @@ +"""Defines methods for initializing the Kaskada session.""" + +from typing import Optional + +from kaskada import _ffi + + +_SESSION: Optional[_ffi.Session] = None + + +def init_session() -> None: + """ + Initialize the Kaskada session for this Python process. + + This must only be called once per session. It must be called before + any other Kaskada functions are called. + + Raises + ------ + RuntimeError + If the session has already been initialized. + """ + global _SESSION + if _SESSION is not None: + raise RuntimeError("Session has already been initialized") + _SESSION = _ffi.Session() + + +def _get_session() -> _ffi.Session: + """ + Assert that the session has been initialized. + + Returns + ------- + _ffi.Session + The FFI session handle. + + Raises + ------ + AssertionError + If the session has not been initialized. 
+ """ + global _SESSION + assert _SESSION is not None, "Session has not been initialized" + return _SESSION diff --git a/python/pysrc/kaskada/_timestream.py b/python/pysrc/kaskada/_timestream.py new file mode 100644 index 000000000..f5d761cae --- /dev/null +++ b/python/pysrc/kaskada/_timestream.py @@ -0,0 +1,1426 @@ +"""Defines classes representing Kaskada expressions.""" + +from __future__ import annotations + +import sys +import warnings +from datetime import datetime +from datetime import timedelta +from typing import Callable +from typing import List +from typing import Mapping +from typing import Optional +from typing import Sequence +from typing import Tuple +from typing import Union +from typing import final + +import kaskada as kd +import kaskada._ffi as _ffi +import pandas as pd +import pyarrow as pa +from typing_extensions import TypeAlias + +from ._execution import ExecutionOptions +from ._result import Result + + +#: A literal value that can be used as an argument to a Timestream operation. +Literal: TypeAlias = Optional[Union[int, str, float, bool, timedelta, datetime]] + +#: A Timestream or literal which can be used as an argument to a Timestream operation. +Arg: TypeAlias = Union["Timestream", Literal] + + +def _augment_error( + args: Sequence[Union[Timestream, Literal]], e: Exception +) -> Exception: + """Augment an error with information about the arguments.""" + if sys.version_info >= (3, 11): + # If we can add notes to the exception, indicate the types. + # This works in Python >=3.11 + for n, arg in enumerate(args): + if isinstance(arg, Timestream): + e.add_note(f"Arg[{n}]: Timestream[{arg.data_type}]") + else: + e.add_note(f"Arg[{n}]: Literal {arg} ({type(arg)})") + return e + + +class Timestream(object): + """A `Timestream` represents a computation producing a Timestream.""" + + _ffi_expr: _ffi.Expr + + def __init__(self, ffi: _ffi.Expr) -> None: + """Create a new expression.""" + self._ffi_expr = ffi + + @staticmethod + def _literal(value: Literal, session: _ffi.Session) -> Timestream: + """Construct a Timestream for a literal value.""" + if isinstance(value, timedelta): + raise TypeError("Cannot create a literal Timestream from a timedelta") + elif isinstance(value, datetime): + raise TypeError("Cannot create a literal Timestream from a datetime") + return Timestream(_ffi.Expr.literal(session, value)) + + @staticmethod + def _call( + func: str, + *args: Union[Timestream, Literal], + session: Optional[_ffi.Session] = None, + ) -> Timestream: + """ + Construct a new Timestream by calling the given function. + + Parameters + ---------- + func : str + Name of the function to apply. + *args : Timestream | int | str | float | bool | None + List of arguments to the expression. + session : FFI Session + FFI Session to create the expression in. + If unspecified, will infer from the arguments. + Will fail if all arguments are literals and the session is not provided. + + Returns + ------- + Timestream + Timestream representing the result of the function applied to the arguments. + + Raises + ------ + # noqa: DAR401 _augment_error + TypeError + If the argument types are invalid for the given function. + ValueError + If the argument values are invalid for the given function. 
+ """ + if session is None: + session = next( + arg._ffi_expr.session() for arg in args if isinstance(arg, Timestream) + ) + + def make_arg(arg: Union[Timestream, Literal]) -> _ffi.Expr: + if isinstance(arg, Timestream): + return arg._ffi_expr + else: + return Timestream._literal(arg, session)._ffi_expr + + ffi_args = [make_arg(arg) for arg in args] + try: + return Timestream( + # TODO: FRAZ - so I need a `call` that can take the udf + _ffi.Expr.call(session=session, operation=func, args=ffi_args) + ) + except TypeError as e: + # noqa: DAR401 + raise _augment_error(args, TypeError(str(e))) from e + except ValueError as e: + raise _augment_error(args, ValueError(str(e))) from e + + @property + def data_type(self) -> pa.DataType: + """The PyArrow type of values in this Timestream.""" + return self._ffi_expr.data_type() + + @property + def is_continuous(self) -> bool: + """Returns true if this Timestream is continuous.""" + return self._ffi_expr.is_continuous() + + @final + def pipe( + self, + func: Union[Callable[..., Timestream], Tuple[Callable[..., Timestream], str]], + *args: Union[Timestream, Literal], + **kwargs: Union[Timestream, Literal], + ) -> Timestream: + """ + Apply chainable functions that produce Timestreams. + + Parameters + ---------- + func : Callable[..., Timestream] | Tuple[Callable[..., Timestream], str] + Function to apply to this Timestream. Alternatively a `(func, + keyword)` tuple where `keyword` is a string indicating the keyword + of `func` that expects the Timestream. + args : iterable, optional + Positional arguments passed into ``func``. + kwargs : mapping, optional + A dictionary of keyword arguments passed into ``func``. + + Returns + ------- + Timestream + The result of applying `func` to the arguments. + + Raises + ------ + ValueError + When using `self` with a specific `keyword` if the `keyword` also + appears on in the `kwargs`. + + Notes + ----- + Use ``.pipe`` when chaining together functions that expect Timestreams. + + Examples + -------- + Instead of writing + + >>> func(g(h(df), arg1=a), arg2=b, arg3=c) # doctest: +SKIP + + You can write + + >>> (df.pipe(h) + >>> .pipe(g, arg1=a) + >>> .pipe(func, arg2=b, arg3=c) + >>> ) # doctest: +SKIP + + If you have a function that takes the data as (say) the second + argument, pass a tuple indicating which keyword expects the + data. For example, suppose ``func`` takes its data as ``arg2``: + + >>> (df.pipe(h) + >>> .pipe(g, arg1=a) + >>> .pipe((func, 'arg2'), arg1=a, arg3=c) + >>> ) # doctest: +SKIP + """ + if isinstance(func, tuple): + func, target = func + if target in kwargs: + msg = f"{target} is both the pipe target and a keyword argument" + raise ValueError(msg) + kwargs[target] = self + return func(*args, **kwargs) + else: + return func(self, *args, **kwargs) + + def add(self, rhs: Union[Timestream, Literal]) -> Timestream: + """ + Create a Timestream adding this and `rhs`. + + Parameters + ---------- + rhs : Union[Timestream, Literal] + The Timestream or literal value to add to this. + + Returns + ------- + Timestream + The Timestream resulting from `self + rhs`. + + Notes + ----- + You can also write `a.add(b)` as `a + b`. + """ + if isinstance(rhs, timedelta): + # Right now, we can't convert a time delta directly to a scalar value (literal). + # So we convert it to seconds and then add it. + # Note that this loses precision if the timedelta has a fractional number of seconds, + # and fail if the number of seconds exceeds an integer. 
+            session = self._ffi_expr.session()
+            seconds = Timestream._call(
+                "seconds", int(rhs.total_seconds()), session=session
+            )
+            return Timestream._call("add_time", seconds, self)
+        else:
+            return Timestream._call("add", self, rhs)
+
+    def __add__(self, rhs: Union[Timestream, Literal]) -> Timestream:
+        """Implement `self + rhs`."""
+        return self.add(rhs)
+
+    def __radd__(self, lhs: Union[Timestream, Literal]) -> Timestream:
+        """Implement `lhs + self`."""
+        if not isinstance(lhs, Timestream):
+            lhs = Timestream._literal(lhs, self._ffi_expr.session())
+        return lhs.add(self)
+
+    def sub(self, rhs: Union[Timestream, Literal]) -> Timestream:
+        """
+        Create a Timestream subtracting `rhs` from this.
+
+        Parameters
+        ----------
+        rhs : Union[Timestream, Literal]
+            The Timestream or literal value to subtract from this.
+
+        Returns
+        -------
+        Timestream
+            The Timestream resulting from `self - rhs`.
+
+        Notes
+        -----
+        You can also write `a.sub(b)` as `a - b`.
+        """
+        return Timestream._call("sub", self, rhs)
+
+    def __sub__(self, rhs: Union[Timestream, Literal]) -> Timestream:
+        """Implement `self - rhs`."""
+        return self.sub(rhs)
+
+    def __rsub__(self, lhs: Union[Timestream, Literal]) -> Timestream:
+        """Implement `lhs - self`."""
+        if not isinstance(lhs, Timestream):
+            lhs = Timestream._literal(lhs, self._ffi_expr.session())
+        return lhs.sub(self)
+
+    def mul(self, rhs: Union[Timestream, Literal]) -> Timestream:
+        """
+        Create a Timestream multiplying this and `rhs`.
+
+        Parameters
+        ----------
+        rhs : Union[Timestream, Literal]
+            The Timestream or literal value to multiply with this.
+
+        Returns
+        -------
+        Timestream
+            The Timestream resulting from `self * rhs`.
+
+        Notes
+        -----
+        You can also write `a.mul(b)` as `a * b`.
+        """
+        return Timestream._call("mul", self, rhs)
+
+    def __mul__(self, rhs: Union[Timestream, Literal]) -> Timestream:
+        """Implement `self * rhs`."""
+        return self.mul(rhs)
+
+    def __rmul__(self, lhs: Union[Timestream, Literal]) -> Timestream:
+        """Implement `lhs * self`."""
+        if not isinstance(lhs, Timestream):
+            lhs = Timestream._literal(lhs, self._ffi_expr.session())
+        return lhs.mul(self)
+
+    def div(self, divisor: Union[Timestream, Literal]) -> Timestream:
+        """
+        Create a Timestream by dividing this and `divisor`.
+
+        Parameters
+        ----------
+        divisor : Union[Timestream, Literal]
+            The Timestream or literal value to divide this by.
+
+        Returns
+        -------
+        Timestream
+            The Timestream resulting from `self / divisor`.
+
+        Notes
+        -----
+        You can also write `a.div(b)` as `a / b`.
+        """
+        return Timestream._call("div", self, divisor)
+
+    def __truediv__(self, divisor: Union[Timestream, Literal]) -> Timestream:
+        """Implement `self / divisor`."""
+        return self.div(divisor)
+
+    def __rtruediv__(self, dividend: Union[Timestream, Literal]) -> Timestream:
+        """Implement `dividend / self`."""
+        if not isinstance(dividend, Timestream):
+            dividend = Timestream._literal(dividend, self._ffi_expr.session())
+        return dividend.div(self)
+
+    def lt(self, rhs: Union[Timestream, Literal]) -> Timestream:
+        """
+        Create a Timestream that is true if this is less than `rhs`.
+
+        Parameters
+        ----------
+        rhs : Union[Timestream, Literal]
+            The Timestream or literal value to compare to.
+
+        Returns
+        -------
+        Timestream
+            The Timestream resulting from `self < rhs`.
+
+        Notes
+        -----
+        You can also write `a.lt(b)` as `a < b`.
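+
+        Examples
+        --------
+        Illustrative, assuming a numeric Timestream `m`:
+
+        >>> m.lt(10)  # doctest: +SKIP
+        >>> m < 10  # doctest: +SKIP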
+ """ + return Timestream._call("lt", self, rhs) + + def __lt__(self, rhs: Union[Timestream, Literal]) -> Timestream: + """Implement `self < rhs`.""" + return self.lt(rhs) + + def le(self, rhs: Union[Timestream, Literal]) -> Timestream: + """ + Create a Timestream that is true if this is less than or equal to `rhs`. + + Parameters + ---------- + rhs : Union[Timestream, Literal] + The Timestream or literal value to compare to. + + Returns + ------- + Timestream + The Timestream resulting from `self <= rhs`. + + Notes + ----- + You can also write `a.le(b)` as `a <= b`. + """ + return Timestream._call("lte", self, rhs) + + def __le__(self, rhs: Union[Timestream, Literal]) -> Timestream: + """Implement `self <= rhs`.""" + return self.le(rhs) + + def gt(self, rhs: Union[Timestream, Literal]) -> Timestream: + """ + Create a Timestream that is true if this is greater than `rhs`. + + Parameters + ---------- + rhs : Union[Timestream, Literal] + The Timestream or literal value to compare to. + + Returns + ------- + Timestream + The Timestream resulting from `self > rhs`. + + Notes + ----- + You can also write `a.gt(b)` as `a > b`. + """ + return Timestream._call("gt", self, rhs) + + def __gt__(self, rhs: Union[Timestream, Literal]) -> Timestream: + """Implement `self > rhs`.""" + return self.gt(rhs) + + def ge(self, rhs: Union[Timestream, Literal]) -> Timestream: + """ + Create a Timestream that is true if this is greater than or equal to `rhs`. + + Parameters + ---------- + rhs : Union[Timestream, Literal] + The Timestream or literal value to compare to. + + Returns + ------- + Timestream + The Timestream resulting from `self >= rhs`. + + Notes + ----- + You can also write `a.ge(b)` as `a >= b`. + """ + return Timestream._call("gte", self, rhs) + + def __ge__(self, rhs: Union[Timestream, Literal]) -> Timestream: + """Implement `self >= rhs`.""" + return self.ge(rhs) + + def and_(self, rhs: Union[Timestream, Literal]) -> Timestream: + """ + Create the logical conjunction of this Timestream and `rhs`. + + Parameters + ---------- + rhs : Union[Timestream, Literal] + The Timestream or literal value to conjoin with. + + Returns + ------- + Timestream + The Timestream resulting from `self and rhs`. + """ + return Timestream._call("logical_and", self, rhs) + + def or_(self, rhs: Union[Timestream, Literal]) -> Timestream: + """ + Create the logical disjunction of this Timestream and `rhs`. + + Parameters + ---------- + rhs : Union[Timestream, Literal] + The Timestream or literal value to disjoin with. + + Returns + ------- + Timestream + The Timestream resulting from `self or rhs`. + """ + return Timestream._call("logical_or", self, rhs) + + def not_(self) -> Timestream: + """ + Create the logical negation of this Timestream. + + Returns + ------- + Timestream + The Timestream resulting from `not self`. + """ + return Timestream._call("not", self) + + def eq(self, other: Union[Timestream, Literal]) -> Timestream: + """ + Create a Timestream that is true if this is equal to `other`. + + Parameters + ---------- + other : Union[Timestream, Literal] + The Timestream or literal value to compare to. + + Returns + ------- + Timestream + The Timestream indicating whether the `self` and `other` are equal. + + Note + ---- + Equality is *not* available as `a == b`. + """ + return Timestream._call("eq", self, other) + + def ne(self, other: Union[Timestream, Literal]) -> Timestream: + """ + Create a Timestream that is true if this is not equal to `other`. 
+
+        Parameters
+        ----------
+        other : Union[Timestream, Literal]
+            The Timestream or literal value to compare to.
+
+        Returns
+        -------
+        Timestream
+            The Timestream indicating whether `self` and `other` are not equal.
+
+        Note
+        ----
+        Inequality is *not* available as `a != b`.
+        """
+        return Timestream._call("neq", self, other)
+
+    def __eq__(self, other: object) -> bool:
+        """Warn when Timestreams are compared using `==`."""
+        warnings.warn(
+            "Using '==' with Timestreams doesn't produce a boolean stream. Use 'eq' instead.",
+            stacklevel=2,
+        )
+        return super().__eq__(other)
+
+    def __ne__(self, other: object) -> bool:
+        """Warn when Timestreams are compared using `!=`."""
+        warnings.warn(
+            "Using '!=' with Timestreams doesn't produce a boolean stream. Use 'ne' instead.",
+            stacklevel=2,
+        )
+        return super().__ne__(other)
+
+    def index(self, key: Union[Timestream, Literal]) -> Timestream:
+        """
+        Index into the elements of a Timestream.
+
+        If the Timestream contains lists, the key should be an integer index.
+
+        If the Timestream contains maps, the key should be the same type as the map keys.
+
+        Parameters
+        ----------
+        key : Union[Timestream, Literal]
+            The key to index into the expression.
+
+        Raises
+        ------
+        TypeError
+            When the Timestream is not a list or map.
+
+        Returns
+        -------
+        Timestream
+            Timestream with the resulting value (or `null` if absent) at each point.
+
+        Note
+        ----
+        Indexing may be written using the operator `self[key]` instead of `self.index(key)`.
+        """
+        data_type = self.data_type
+        if isinstance(data_type, pa.MapType):
+            return Timestream._call("get", key, self)
+        elif isinstance(data_type, pa.ListType):
+            return Timestream._call("index", key, self)
+        else:
+            raise TypeError(f"Cannot index into {data_type}")
+
+    def __getitem__(self, key: Union[Timestream, Literal]) -> Timestream:
+        """
+        Index into a list or map Timestream.
+
+        Parameters
+        ----------
+        key : Union[Timestream, Literal]
+            The key to index into the expression.
+
+        Returns
+        -------
+        Timestream
+            Timestream with the resulting value (or `null` if absent) at each point.
+
+        See Also
+        --------
+        index
+        """
+        return self.index(key)
+
+    def col(self, name: str) -> Timestream:
+        """
+        Access a named column or field of a Timestream.
+
+        Parameters
+        ----------
+        name : str
+            The name of the column or field to access.
+
+        Returns
+        -------
+        Timestream
+            Timestream with the resulting value (or `null` if absent) at each point.
+
+        Raises
+        ------
+        TypeError
+            When the Timestream is not a record.
+        """
+        data_type = self.data_type
+        if isinstance(data_type, pa.StructType) or isinstance(data_type, pa.ListType):
+            return Timestream._call("fieldref", self, name)
+        else:
+            raise TypeError(
+                f"Cannot access column {name!r} of non-record type '{data_type}'"  # noqa : B907
+            )
+
+    def select(self, *args: str) -> Timestream:
+        """
+        Select the given fields from a Timestream of records.
+
+        Parameters
+        ----------
+        args : list[str]
+            List of field names to select.
+
+        Returns
+        -------
+        Timestream
+            Timestream with the same records limited to the specified fields.
+        """
+        return Timestream._call("select_fields", self, *args)
+
+    def remove(self, *args: str) -> Timestream:
+        """
+        Remove the given fields from a Timestream of records.
+
+        Parameters
+        ----------
+        args : list[str]
+            List of field names to exclude.
+
+        Returns
+        -------
+        Timestream
+            Timestream with the same records and the given fields excluded.
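+
+        Examples
+        --------
+        Illustrative, assuming a record Timestream `purchases` with an
+        `internal_id` field:
+
+        >>> purchases.remove("internal_id")  # doctest: +SKIP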
+ """ + return Timestream._call("remove_fields", self, *args) + + def extend( + self, fields: Mapping[str, Arg] | Callable[[Timestream], Mapping[str, Arg]] + ) -> Timestream: + """ + Extend this Timestream of records with additional fields. + + If a field exists in the base Timestream and the `fields`, the value + from the `fields` will be taken. + + Parameters + ---------- + fields : Mapping[str, Arg] | Callable[[Timestream], Mapping[str, Arg]] + Fields to add to each record in the Timestream. + + Returns + ------- + Timestream + Timestream with the given fields added. + """ + # This argument order is weird, and we shouldn't need to make a record + # in order to do the extension. + if callable(fields): + fields = fields(self) + extension = record(fields) + return Timestream._call("extend_record", extension, self) + + def neg(self) -> Timestream: + """ + Create a Timestream from the numeric negation of self. + + Returns + ------- + Timestream + Timestream of the numeric negation of self. + """ + return Timestream._call("neg", self) + + def is_null(self) -> Timestream: + """ + Create a boolean Timestream containing `true` when self is `null`. + + Returns + ------- + Timestream + Timestream with `true` when self is `null` and `false` when it isn't. + """ + return self.is_not_null().not_() + + def is_not_null(self) -> Timestream: + """ + Create a boolean Timestream containing `true` when self is not `null`. + + Returns + ------- + Timestream + Timestream with `true` when self is not `null` and `false` when it is. + """ + return Timestream._call("is_valid", self) + + def filter(self, condition: Timestream) -> Timestream: + """ + Create a Timestream containing only the points where `condition` is `true`. + + Parameters + ---------- + condition : Timestream + The condition to filter on. + + Returns + ------- + Timestream + Timestream containing `self` where `condition` is `true`. + """ + return Timestream._call("when", condition, self) + + def collect( + self, + *, + max: Optional[int], + min: Optional[int] = 0, + window: Optional[kd.windows.Window] = None, + ) -> Timestream: + """ + Create a Timestream collecting up to the last `max` values in the `window`. + + Collects the values for each key separately. + + Parameters + ---------- + max : Optional[int] + The maximum number of values to collect. + If `None` all values are collected. + min: Optional[int] + The minimum number of values to collect before + producing a value. Defaults to 0. + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the collected list at each point. + """ + if pa.types.is_list(self.data_type): + return ( + record({"value": self}) + .collect(max=max, min=min, window=window) + .col("value") + ) + else: + return _aggregation("collect", self, window, max, min) + + def time(self) -> Timestream: + """ + Create a Timestream containing the time of each point. + + Returns + ------- + Timestream + Timestream containing the time of each point. + """ + return Timestream._call("time_of", self) + + def lag(self, n: int) -> Timestream: + """ + Create a Timestream containing the value `n` points before each point. + + Parameters + ---------- + n : int + The number of points to lag by. + + Returns + ------- + Timestream + Timestream containing the value `n` points before each point. 
+ """ + # hack to support structs/lists (as collect supports lists) + return self.collect(max=n + 1, min=n + 1)[0] + + def if_(self, condition: Union[Timestream, Literal]) -> Timestream: + """ + Return `self` where `condition` is `true`, or `null` otherwise. + + Parameters + ---------- + condition : Union[Timestream, Literal] + The condition to check. + + Returns + ------- + Timestream + Timestream containing the value of `self` where `condition` is `true`, or + `null` otherwise. + """ + return Timestream._call("if", condition, self) + + def null_if(self, condition: Union[Timestream, Literal]) -> Timestream: + """ + Return `self` where `condition` is `false`, or `null` otherwise. + + Parameters + ---------- + condition : Union[Timestream, Literal] + The condition to check. + + Returns + ------- + Timestream + Timestream containing the value of `self` where `condition` is `false`, or + `null` otherwise. + """ + return Timestream._call("null_if", condition, self) + + def length(self) -> Timestream: + """ + Create a Timestream containing the length of `self`. + + Returns + ------- + Timestream + Timestream containing the length of `self`. + + Raises + ------ + TypeError + When the Timestream is not a string or list. + """ + if self.data_type.equals(pa.string()): + return Timestream._call("len", self) + elif isinstance(self.data_type, pa.ListType): + return Timestream._call("list_len", self) + else: + raise TypeError(f"length not supported for {self.data_type}") + + def with_key(self, key: Timestream, grouping: Optional[str] = None) -> Timestream: + """ + Create a Timestream with a new grouping by `key`. + + Parameters + ---------- + key : Timestream + The new key to use for the grouping. + grouping : Optional[str] + A string literal naming the new grouping. If no `grouping` is specified, + one will be computed from the type of the `key`. + + Returns + ------- + Timestream + Timestream with a new grouping by `key`. + """ + return Timestream._call("with_key", key, self, grouping) + + def lookup(self, key: Union[Timestream, Literal]) -> Timestream: + """ + Lookup the value of `self` for each `key` at the times in `key`. + + For each non-`null` point in the `key` timestream, returns the value + from `self` at that time and associated with that `key`. Returns `null` + if the `key` is `null` or if there is no `value` computed for that key + at the corresponding time. + + Parameters + ---------- + key : Union[Timestream, Literal] + The foreign key to lookup. + This must match the type of the keys in `self`. + + Returns + ------- + Timestream + Timestream containing the lookup join between the `key` and `self`. + """ + return Timestream._call("lookup", key, self) + + def shift_to(self, time: Union[Timestream, datetime]) -> Timestream: + """ + Create a Timestream shifting each point forward to `time`. + + If multiple values are shifted to the same time, they will be emitted in + the order in which they originally occurred. + + Parameters + ---------- + time : Union[Timestream, datetime] + The time to shift to. + This must be a datetime or a Timestream of timestamp_ns. + + Returns + ------- + Timestream + Timestream containing the shifted points. + + Raises + ------ + NotImplementedError + When `time` is a datetime (shift_to literal not yet implemented). 
+ """ + if isinstance(time, datetime): + # session = self._ffi_expr.session() + # time_ns = time.timestamp() * 1e9 + # time_ns = Timestream._literal(time_ns, session=session) + # time_ns = Timestream.cast(time_ns, pa.timestamp('ns')) + # return Timestream._call("shift_to", time_ns, self) + raise NotImplementedError("shift_to with datetime literal unsupported") + else: + return Timestream._call("shift_to", time, self) + + def shift_by(self, delta: Union[Timestream, timedelta]) -> Timestream: + """ + Create a Timestream shifting each point forward by the `delta`. + + If multiple values are shifted to the same time, they will be emitted in + the order in which they originally occurred. + + Parameters + ---------- + delta : Union[Timestream, timedelta] + The delta to shift the point forward by. + + Returns + ------- + Timestream + Timestream containing the shifted points. + """ + if isinstance(delta, timedelta): + session = self._ffi_expr.session() + seconds = Timestream._call( + "seconds", int(delta.total_seconds()), session=session + ) + return Timestream._call("shift_by", seconds, self) + else: + return Timestream._call("shift_by", delta, self) + + def shift_until(self, predicate: Timestream) -> Timestream: + """ + Shift points from `self` forward to the next time `predicate` is true. + + Note that if the `predicate` evaluates to true at the same time as `self`, + the point will be emitted at that time. + + If multiple values are shifted to the same time, they will be emitted in + the order in which they originally occurred. + + Parameters + ---------- + predicate : Timestream + The predicate to determine whether to emit shifted rows. + + Returns + ------- + Timestream + Timestream containing the shifted points. + """ + return Timestream._call("shift_until", predicate, self) + + def sum(self, *, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream summing the values in the `window`. + + Computes the sum for each key separately. + + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the sum up to and including each point. + """ + return _aggregation("sum", self, window) + + def first(self, *, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream containing the first value in the `window`. + + Computed for each key separately. + + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the first value for the key in the window for + each point. + """ + return _aggregation("first", self, window) + + def last(self, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream containing the last value in the `window`. + + Computed for each key separately. + + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the last value for the key in the window for + each point. + """ + return _aggregation("last", self, window) + + def count(self, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream containing the count value in the `window`. + + Computed for each key separately. 
+ + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the count value for the key in the window for + each point. + """ + return _aggregation("count", self, window) + + def count_if(self, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream containing the count of `true` values in `window`. + + Computed for each key separately. + + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the count value if true for the key in the window for + each point. + """ + return _aggregation("count_if", self, window) + + def max(self, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream containing the max value in the `window`. + + Computed for each key separately. + + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the max value for the key in the window for + each point. + """ + return _aggregation("max", self, window) + + def min(self, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream containing the min value in the `window`. + + Computed for each key separately. + + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the min value for the key in the window for + each point. + """ + return _aggregation("min", self, window) + + def mean(self, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream containing the mean value in the `window`. + + Computed for each key separately. + + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the mean value for the key in the window for + each point. + """ + return _aggregation("mean", self, window) + + def stddev(self, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream containing the standard deviation in the `window`. + + Computed for each key separately. + + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the standard deviation for the key in the window for + each point. + """ + return _aggregation("stddev", self, window) + + def variance(self, window: Optional[kd.windows.Window] = None) -> Timestream: + """ + Create a Timestream containing the variance in the `window`. + + Computed for each key separately. + + Parameters + ---------- + window : Optional[Window] + The window to use for the aggregation. + If not specified, the entire Timestream is used. + + Returns + ------- + Timestream + Timestream containing the variance for the key in the window for + each point. + """ + return _aggregation("variance", self, window) + + def cast(self, data_type: pa.DataType) -> Timestream: + """ + Cast the type of this Timestream to the given data type. 
+
+        Parameters
+        ----------
+        data_type : pa.DataType
+            The data type to cast to.
+
+        Returns
+        -------
+        Timestream
+            Timestream with the given data type.
+        """
+        return Timestream(self._ffi_expr.cast(data_type))
+
+    def else_(self, other: Timestream) -> Timestream:
+        """
+        Return `self` if not `null`, otherwise `other`.
+
+        Parameters
+        ----------
+        other : Timestream
+            The Timestream to use if self is `null`.
+
+        Returns
+        -------
+        Timestream
+            Timestream containing the value of `self` if not `null`, otherwise `other`.
+        """
+        return Timestream._call("else", other, self)
+
+    def seconds_since(self, time: Union[Timestream, Literal]) -> Timestream:
+        """
+        Return a Timestream containing seconds between `time` and `self`.
+
+        Parameters
+        ----------
+        time : Union[Timestream, Literal]
+            The time to compute the seconds since.
+
+            This can be either a stream of timestamps or a datetime literal.
+            If `time` is a Timestream, the result will contain the seconds
+            from `self.time()` to `time.time()` for each point.
+
+        Returns
+        -------
+        Timestream
+            Timestream containing the number of seconds since `time`.
+
+            If `self.time()` is greater than `time`, the result will be positive.
+        """
+        if isinstance(time, datetime):
+            session = self._ffi_expr.session()
+            nanos = Timestream._literal(time.timestamp() * 1e9, session=session)
+            nanos = Timestream.cast(nanos, pa.timestamp("ns", None))
+            return Timestream._call("seconds_between", nanos, self)
+        else:
+            return Timestream._call("seconds_between", time, self)
+
+    def seconds_since_previous(self, n: int = 1) -> Timestream:
+        """
+        Return a Timestream containing seconds between `self` and the time `n` points ago.
+
+        Parameters
+        ----------
+        n : int
+            The number of points to look back. For example, `n=1` refers to
+            the previous point.
+
+            Defaults to 1 (the previous point).
+
+        Returns
+        -------
+        Timestream
+            Timestream containing the number of seconds since the time `n`
+            points ago.
+        """
+        time_of_current = Timestream._call("time_of", self).cast(pa.int64())
+        time_of_previous = Timestream._call("time_of", self).lag(n).cast(pa.int64())
+
+        # `time_of` returns nanoseconds, so divide to get seconds
+        return time_of_current.sub(time_of_previous).div(1e9).cast(pa.duration("s"))
+
+    def flatten(self) -> Timestream:
+        """Flatten a list of lists to a list of values."""
+        return Timestream._call("flatten", self)
+
+    def union(self, other: Timestream) -> Timestream:
+        """
+        Union the lists in this timestream with the lists in the other Timestream.
+
+        This corresponds to a pair-wise union within each row of the timestreams.
+
+        Parameters
+        ----------
+        other : Timestream
+            The Timestream of lists to union with.
+
+        Returns
+        -------
+        Timestream
+            Timestream containing the union of the lists.
+        """
+        return Timestream._call("union", self, other)
+
+    def record(self, fields: Callable[[Timestream], Mapping[str, Arg]]) -> Timestream:
+        """
+        Create a record Timestream from fields computed from this timestream.
+
+        Parameters
+        ----------
+        fields : Callable[[Timestream], Mapping[str, Arg]]
+            The fields to include in the record.
+
+        Returns
+        -------
+        Timestream
+            Timestream containing records with the given fields.
+
+        See Also
+        --------
+        kaskada.record: Function for creating a record from one or more
+            timestreams.
+        """
+        return record(fields(self))
+
+    def preview(self, limit: int = 100) -> pd.DataFrame:
+        """
+        Return the first N rows of the result as a Pandas DataFrame.
+
+        This makes it easy to preview the content of the Timestream.
+
+        Parameters
+        ----------
+        limit : int
+            Maximum number of rows to print.
+
+        Returns
+        -------
+        pd.DataFrame
+            The Pandas DataFrame containing the first `limit` points.
+        """
+        return self.run(row_limit=limit).to_pandas()
+
+    def run(
+        self,
+        row_limit: Optional[int] = None,
+        max_batch_size: Optional[int] = None,
+        materialize: bool = False,
+    ) -> Result:
+        """
+        Run the Timestream once.
+
+        Parameters
+        ----------
+        row_limit : Optional[int]
+            The maximum number of rows to return.
+            If not specified all rows are returned.
+
+        max_batch_size : Optional[int]
+            The maximum number of rows to return in each batch.
+            If not specified the default is used.
+
+        materialize : bool
+            If true, the execution will be a continuous materialization.
+
+        Returns
+        -------
+        Result
+            The `Result` object to use for accessing the results.
+        """
+        expr = self
+        if not pa.types.is_struct(self.data_type):
+            # The execution engine requires a struct, so wrap this in a record.
+            expr = record({"result": self})
+        options = ExecutionOptions(
+            row_limit=row_limit, max_batch_size=max_batch_size, materialize=materialize
+        )
+        execution = expr._ffi_expr.execute(options)
+        return Result(execution)
+
+
+def _aggregation(
+    op: str,
+    input: Timestream,
+    window: Optional[kd.windows.Window],
+    *args: Union[Timestream, Literal],
+) -> Timestream:
+    """
+    Create the aggregation `op` with the given `input`, `window` and `args`.
+
+    Parameters
+    ----------
+    op : str
+        The operation to create.
+    input : Timestream
+        The input to the aggregation.
+    window : Optional[Window]
+        The window to use for the aggregation.
+    *args : Union[Timestream, Literal]
+        Additional arguments to provide after `input` and before the flattened window.
+
+    Returns
+    -------
+    Timestream
+        The resulting Timestream.
+
+    Raises
+    ------
+    NotImplementedError
+        If the window is not a known type.
+    """
+    if window is None:
+        return Timestream._call(op, input, *args, None, None)
+    elif isinstance(window, kd.windows.Since):
+        return Timestream._call(op, input, *args, window.predicate, None)
+    elif isinstance(window, kd.windows.Sliding):
+        return Timestream._call(op, input, *args, window.predicate, window.duration)
+    elif isinstance(window, kd.windows.Trailing):
+        if op != "collect":
+            raise NotImplementedError(
+                f"Aggregation '{op}' does not support trailing windows"
+            )
+
+        trailing_ns = int(window.duration.total_seconds() * 1e9)
+
+        # Create the shifted-forward input
+        input_shift = input.shift_by(window.duration)
+
+        # Merge, then extract just the input.
+        #
+        # Note: Assumes the "input" is discrete. Can probably
+        # use a transform to make it discrete (eg., `input.filter(True)`)
+        # or a special function to do that.
+        #
+        # HACK: This places an extra null row in the input to `collect`
+        # which allows us to "clear" the window when the appropriate
+        # `duration` has passed with no "real" inputs.
+        merged_input = record({"input": input, "shift": input_shift}).col("input")
+        return Timestream._call("collect", merged_input, *args, None, trailing_ns)
+    else:
+        raise NotImplementedError(f"Unknown window type {window!r}")
+
+
+def record(fields: Mapping[str, Arg]) -> Timestream:
+    """
+    Create a record Timestream from the given fields.
+
+    Parameters
+    ----------
+    fields : Mapping[str, Arg]
+        The fields to include in the record.
+
+    Returns
+    -------
+    Timestream
+        Timestream containing records with the given fields.
+
+    See Also
+    --------
+    Timestream.record: Method for creating a record from fields computed from
+        a timestream.
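+
+    Examples
+    --------
+    Illustrative, combining two Timestreams `m` and `n`:
+
+    >>> kd.record({"m": m, "n": n})  # doctest: +SKIP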
+ """ + import itertools + + args: List[Arg] = list(itertools.chain(*fields.items())) + return Timestream._call("record", *args) diff --git a/python/pysrc/kaskada/plot.py b/python/pysrc/kaskada/plot.py new file mode 100644 index 000000000..68208db09 --- /dev/null +++ b/python/pysrc/kaskada/plot.py @@ -0,0 +1,109 @@ +"""Render timestreams using Plotly.""" + +try: + import plotly.graph_objects as go + import plotly.io as pio + from plotly.subplots import make_subplots + + def _require_plotly() -> None: + pass + +except ImportError: + + def _require_plotly() -> None: + raise ImportError( + "Plotly is not installed. Install it with `pip install plotly`." + ) + + +import itertools +from dataclasses import dataclass +from typing import Optional + +from ._timestream import Timestream + + +@dataclass +class Plot(object): + """ + Configuration for a single plot to render. + + Parameters + ---------- + stream : Timestream + The Timestream to render. + name: str + The name of the plot to render. + Defaults to `Result` if not set. + """ + + stream: Timestream + name: Optional[str] = None + + +def render( + *args: Plot, theme: Optional[str] = None, title_text: Optional[str] = None +) -> None: + """Render one or more plots.""" + _require_plotly() + + if theme is None: + theme = "plotly" + + template = pio.templates[theme] + + fig = make_subplots(rows=len(args), cols=1) + + next_color = itertools.cycle(template.layout.colorway) + color_map = {} + + for row, plot in enumerate(args, 1): + data = plot.stream.run().to_pandas() + name = plot.name or f"Result {row}" + + for key in data["_key"].unique(): + # TODO: Change markers when colorway cycles. + # TODO: Use different markers for different entity types. + # TODO: Render categorical values. + # TODO: Warn if number of points / keys exceeds some threshold. + new = False + if key not in color_map: + new = True + color_map[key] = next(next_color) + color = color_map[key] + + points = data[data["_key"] == key] + + # TODO: Render arrows for continuous timestreams. + # TODO: Render discontinuity at window boundaries. 
+            fig.append_trace(
+                go.Scatter(
+                    x=points["_time"],
+                    y=points["result"],
+                    mode="markers+lines" if plot.stream.is_continuous else "markers",
+                    marker=dict(
+                        color=color,
+                    ),
+                    line={"shape": "hv"},
+                    legendgroup=key,
+                    showlegend=new,
+                    name=str(key),
+                ),
+                row=row,
+                col=1,
+            )
+        fig["layout"][f"xaxis{row}"]["title"] = "Time"
+        fig["layout"][f"yaxis{row}"]["title"] = name
+
+    fig.update_layout(height=200 * len(args), width=600, title_text=title_text)
+
+    fig.show()
+
+
+def _assign_colors(next_color, color_map, keys):
+    colors = []
+    for key in keys:
+        if key not in color_map:
+            color_map[key] = next(next_color)
+        colors.append(color_map[key])
+    return colors
diff --git a/python/pysrc/kaskada/py.typed b/python/pysrc/kaskada/py.typed
new file mode 100644
index 000000000..e69de29bb
diff --git a/python/pysrc/kaskada/sources/__init__.py b/python/pysrc/kaskada/sources/__init__.py
new file mode 100644
index 000000000..129f0c083
--- /dev/null
+++ b/python/pysrc/kaskada/sources/__init__.py
@@ -0,0 +1,10 @@
+"""Sources of data for Kaskada queries."""
+from .arrow import CsvString
+from .arrow import JsonlString
+from .arrow import Pandas
+from .arrow import Parquet
+from .arrow import PyList
+from .source import Source
+
+
+__all__ = ["Source", "CsvString", "Pandas", "JsonlString", "PyList", "Parquet"]
diff --git a/python/pysrc/kaskada/sources/arrow.py b/python/pysrc/kaskada/sources/arrow.py
new file mode 100644
index 000000000..b238b40af
--- /dev/null
+++ b/python/pysrc/kaskada/sources/arrow.py
@@ -0,0 +1,224 @@
+"""Provide sources based on PyArrow, including Pandas and CSV."""
+from __future__ import annotations
+
+from io import BytesIO
+from typing import Optional
+
+import pandas as pd
+import pyarrow as pa
+import pyarrow.csv
+import pyarrow.json
+import pyarrow.parquet
+
+from .source import Source
+
+
+class Pandas(Source):
+    """Source reading data from Pandas dataframe."""
+
+    def __init__(
+        self, dataframe: pd.DataFrame, *, schema: Optional[pa.Schema] = None, **kwargs
+    ) -> None:
+        """
+        Create a source reading Pandas DataFrames.
+
+        Parameters
+        ----------
+        dataframe : pd.DataFrame
+            The DataFrame to start from.
+        schema : pa.Schema, optional
+            The schema to use.
+            If not specified, it will be inferred from the `dataframe`.
+        **kwargs : dict, optional
+            Additional keyword arguments to pass to the super class.
+            Should include the required column names.
+
+        See Also
+        --------
+        Source.__init__ : For required keyword arguments.
+        """
+        if schema is None:
+            schema = pa.Schema.from_pandas(dataframe)
+        super().__init__(schema, **kwargs)
+        self.add_data(dataframe)
+
+    def add_data(self, data: pd.DataFrame) -> None:
+        """Add data to the source."""
+        table = pa.Table.from_pandas(data, self._schema, preserve_index=False)
+        for batch in table.to_batches():
+            self._ffi_table.add_pyarrow(batch)
+
+
+class PyList(Source):
+    """Source reading data from lists of dicts."""
+
+    def __init__(
+        self, rows: dict | list[dict], *, schema: Optional[pa.Schema] = None, **kwargs
+    ) -> None:
+        """
+        Create a source reading from rows represented as dicts.
+
+        Parameters
+        ----------
+        rows : dict | list[dict]
+            One or more rows represented as dicts.
+        schema : pa.Schema, optional
+            The schema to use.
+            If not provided, the schema will be inferred from the `rows`.
+        **kwargs : dict, optional
+            Additional keyword arguments to pass to the super class.
+            Should include the required column names.
+
+        See Also
+        --------
+        Source.__init__ : For required keyword arguments.
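+
+        Examples
+        --------
+        Illustrative, creating a source from a single row (the column
+        names are hypothetical):
+
+        >>> source = PyList(
+        ...     {"time": "1996-12-19T16:39:57", "key": "A", "m": 5},
+        ...     time_column_name="time",
+        ...     key_column_name="key",
+        ... )  # doctest: +SKIP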
+ """ + if schema is None: + schema = pa.Table.from_pylist(rows).schema + super().__init__(schema, **kwargs) + + self._convert_options = pyarrow.csv.ConvertOptions(column_types=schema) + self.add_rows(rows) + + def add_rows(self, rows: dict | list[dict]) -> None: + """Add data to the source.""" + if isinstance(rows, dict): + rows = [rows] + table = pa.Table.from_pylist(rows, schema=self._schema) + for batch in table.to_batches(): + self._ffi_table.add_pyarrow(batch) + + +# TODO: We should be able to go straight from CSV to PyArrow, but +# currently that has some problems with timestamp hadling. +class CsvString(Source): + """Source reading data from CSV strings using Pandas.""" + + def __init__( + self, csv_string: str | BytesIO, *, schema: Optional[pa.Schema] = None, **kwargs + ) -> None: + """ + Create a CSV String Source. + + Parameters + ---------- + csv_string : str + The CSV string to start from. + schema : pa.Schema, optional + The schema to use. + If not provided, the schema will be inferred from the `csv_string`. + **kwargs : dict, optional + Additional keyword arguments to pass to the super class. + Should include the required column names. + + See Also + -------- + Source.__init__ : For required keyword arguments. + """ + if isinstance(csv_string, str): + csv_string = BytesIO(csv_string.encode("utf-8")) + if schema is None: + schema = pa.csv.read_csv(csv_string).schema + csv_string.seek(0) + super().__init__(schema, **kwargs) + + self._convert_options = pyarrow.csv.ConvertOptions( + column_types=schema, + strings_can_be_null=True, + ) + self.add_string(csv_string) + + def add_string(self, csv_string: str | BytesIO) -> None: + """Add data to the source.""" + if isinstance(csv_string, str): + csv_string = BytesIO(csv_string.encode("utf-8")) + content = pa.csv.read_csv(csv_string, convert_options=self._convert_options) + for batch in content.to_batches(): + self._ffi_table.add_pyarrow(batch) + + +class JsonlString(Source): + """Source reading data from line-delimited JSON strings using PyArrow.""" + + def __init__( + self, + json_string: str | BytesIO, + *, + schema: Optional[pa.Schema] = None, + **kwargs, + ) -> None: + """ + Create a JSON String Source. + + Parameters + ---------- + json_string : str + The line-delimited JSON string to start from. + schema : pa.Schema, optional + The schema to use. + If not provided, the schema will be inferred from the `csv_string`. + **kwargs : dict, optional + Additional keyword arguments to pass to the super class. + Should include the required column names. + + See Also + -------- + Source.__init__ : For required keyword arguments. + """ + if isinstance(json_string, str): + json_string = BytesIO(json_string.encode("utf-8")) + if schema is None: + schema = pa.json.read_json(json_string).schema + json_string.seek(0) + super().__init__(schema, **kwargs) + + self._parse_options = pyarrow.json.ParseOptions(explicit_schema=schema) + self.add_string(json_string) + + def add_string(self, json_string: str | BytesIO) -> None: + """Add data to the source.""" + if isinstance(json_string, str): + json_string = BytesIO(json_string.encode("utf-8")) + batches = pa.json.read_json(json_string, parse_options=self._parse_options) + for batch in batches.to_batches(): + self._ffi_table.add_pyarrow(batch) + + +class Parquet(Source): + """Source reading data from Parquet files.""" + + def __init__( + self, path: str, *, schema: Optional[pa.Schema] = None, **kwargs + ) -> None: + """ + Create a Parquet source. 
+
+        Parameters
+        ----------
+        path : str
+            The path to the Parquet file to add.
+        schema : pa.Schema, optional
+            The schema to use.
+            If not provided, the schema will be inferred from the Parquet file at `path`.
+        **kwargs : dict, optional
+            Additional keyword arguments to pass to the super class.
+            Should include the required column names.
+
+        See Also
+        --------
+        Source.__init__ : For required keyword arguments.
+        """
+        if schema is None:
+            schema = pa.parquet.read_schema(path)
+        super().__init__(schema, **kwargs)
+
+        self.add_file(path)
+
+    def add_file(self, path: str) -> None:
+        """Add data to the source."""
+        table = pa.parquet.read_table(
+            path,
+            schema=self._schema,
+        )
+        for batch in table.to_batches():
+            self._ffi_table.add_pyarrow(batch)
diff --git a/python/pysrc/kaskada/sources/source.py b/python/pysrc/kaskada/sources/source.py
new file mode 100644
index 000000000..e7849b019
--- /dev/null
+++ b/python/pysrc/kaskada/sources/source.py
@@ -0,0 +1,84 @@
+"""Provide the base-class for Kaskada sources."""
+from typing import Optional
+
+import kaskada._ffi as _ffi
+import pyarrow as pa
+
+from .._session import _get_session
+from .._timestream import Timestream
+
+
+_TABLE_NUM: int = 0
+
+
+class Source(Timestream):
+    """A source (input) Timestream."""
+
+    # TODO: Clean-up naming on the FFI side.
+    _ffi_table: _ffi.Table
+
+    def __init__(
+        self,
+        schema: pa.Schema,
+        time_column_name: str,
+        key_column_name: str,
+        subsort_column_name: Optional[str] = None,
+        grouping_name: Optional[str] = None,
+        time_unit: Optional[str] = None,
+    ):
+        """Create a new source."""
+        assert isinstance(schema, pa.Schema)
+
+        # Fix the schema. The fields should be non-nullable.
+        def fix_field(field: pa.Field) -> pa.Field:
+            if field.name in [
+                time_column_name,
+                key_column_name,
+                subsort_column_name,
+            ]:
+                field = field.with_nullable(False)
+                if isinstance(field.type, pa.TimestampType):
+                    field = field.with_type(pa.timestamp(field.type.unit, tz=None))
+            return field
+
+        fields = [fix_field(f) for f in schema]
+        schema = pa.schema(fields)
+
+        Source._validate_column(time_column_name, schema)
+        Source._validate_column(key_column_name, schema)
+        Source._validate_column(subsort_column_name, schema)
+
+        # Hack -- Sparrow currently requires tables be named.
+        global _TABLE_NUM
+        name = f"table{_TABLE_NUM}"
+        _TABLE_NUM += 1
+
+        ffi_table = _ffi.Table(
+            _get_session(),
+            name,
+            time_column_name,
+            key_column_name,
+            schema,
+            subsort_column_name,
+            grouping_name,
+            time_unit,
+        )
+        super().__init__(ffi_table)
+        self._schema = schema
+        self._ffi_table = ffi_table
+
+    @property
+    def name(self) -> str:
+        """Get the current table name."""
+        return self._ffi_table.name
+
+    # TODO: Most of these checks exist in Sparrow. We should just surface
+    # those errors more cleanly.
+    @staticmethod
+    def _validate_column(field_name: Optional[str], schema: pa.Schema) -> None:
+        if field_name is not None:
+            field = schema.field(field_name)
+            if field is None:
+                raise KeyError(f"Column {field_name!r} does not exist")
+            if field.nullable:
+                raise ValueError(f"Column: {field_name!r} must be non-nullable")
diff --git a/python/pysrc/kaskada/udf.py b/python/pysrc/kaskada/udf.py
new file mode 100644
index 000000000..05cf5e3b8
--- /dev/null
+++ b/python/pysrc/kaskada/udf.py
@@ -0,0 +1,57 @@
+"""Functionality for calling Python UDFs from Kaskada."""
+import functools
+from typing import Callable
+
+import pandas as pd
+import pyarrow as pa
+
+
+# TODO: Allow functions to return `pd.DataFrame` for struct arrays.
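+#
+# Illustrative usage of `fenl_udf` (the signature string format below is an
+# assumption for illustration, not a documented contract):
+#
+#   @fenl_udf("add_one", "add_one(x: number) -> number")
+#   def add_one(x: pd.Series) -> pd.Series:
+#       return x + 1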
+FuncType = Callable[..., pd.Series]
+
+
+class Udf(object):
+    """Class wrapping a UDF used in Kaskada."""
+
+    def __init__(self, name: str, func: FuncType, signature: str) -> None:
+        """Create a UDF for a function returning a Pandas series.
+
+        Parameters
+        ----------
+        name : str
+            Name of the function being wrapped.
+
+        func : FuncType
+            The callable to wrap.
+
+        signature : str
+            The Kaskada function signature for this UDF. Will be used to check
+            types of calls to this function and propagate type information for
+            the rest of the query.
+        """
+        functools.update_wrapper(self, func)
+        self.name = name
+        self.func = func
+        self.signature = signature
+
+    def run_pyarrow(self, result_type: pa.DataType, *args: pa.Array) -> pa.Array:
+        """Run the function producing the given result type."""
+        # TODO: I believe this will return a series for simple arrays, and a
+        # dataframe for struct arrays. We should explore how this handles
+        # different types.
+        pd_args = [arg.to_pandas() for arg in args]
+        pd_result = self.func(*pd_args)
+
+        if isinstance(pd_result, pd.Series):
+            return pa.Array.from_pandas(pd_result, type=result_type)
+        else:
+            raise TypeError(f"Unsupported result type: {type(pd_result)}")
+
+
+def fenl_udf(name: str, signature: str):
+    """Decorate a function for use as a Kaskada UDF."""
+
+    def decorator(func: FuncType):
+        return Udf(name, func, signature)
+
+    return decorator
diff --git a/python/pysrc/kaskada/windows.py b/python/pysrc/kaskada/windows.py
new file mode 100644
index 000000000..7dd611bf7
--- /dev/null
+++ b/python/pysrc/kaskada/windows.py
@@ -0,0 +1,74 @@
+"""Windows to use for Timestream aggregations."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from datetime import timedelta
+
+from ._timestream import Timestream
+
+
+@dataclass(frozen=True)
+class Window(object):
+    """Base class for window functions."""
+
+
+@dataclass(frozen=True)
+class Since(Window):
+    """
+    Window since the last time a predicate was true.
+
+    Aggregations will contain all values starting from the last time the predicate
+    evaluated to true (inclusive).
+
+    Parameters
+    ----------
+    predicate : Timestream | bool
+        The boolean Timestream to use as predicate for the window.
+        Each time the predicate evaluates to true the window will be cleared.
+    """
+
+    predicate: Timestream | bool
+
+
+@dataclass(frozen=True)
+class Sliding(Window):
+    """
+    Window for the last `duration` intervals of some `predicate`.
+
+    Parameters
+    ----------
+    duration : int
+        The number of sliding intervals to use in the window.
+
+    predicate : Timestream | bool
+        The boolean Timestream to use as predicate for the window.
+        Each time the predicate evaluates to true the window starts a new interval.
+    """
+
+    duration: int
+    predicate: Timestream | bool
+
+    def __post_init__(self):
+        """Validate the window parameters."""
+        if self.duration <= 0:
+            raise ValueError("duration must be positive")
+
+
+@dataclass(frozen=True)
+class Trailing(Window):
+    """
+    Window over the last `duration` time period.
+
+    Parameters
+    ----------
+    duration : timedelta
+        The duration of the window.
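+
+    Examples
+    --------
+    Illustrative, collecting the values from the trailing 3 seconds of a
+    Timestream `m` (see `Timestream.collect`):
+
+    >>> m.collect(max=None, window=Trailing(timedelta(seconds=3)))  # doctest: +SKIP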
+ """ + + duration: timedelta + + def __post_init__(self): + """Validate the window parameters.""" + if self.duration <= timedelta(0): + raise ValueError("duration must be positive") diff --git a/python/pytests/aggregation/count_if_test.py b/python/pytests/aggregation/count_if_test.py new file mode 100644 index 000000000..1c38ddfb8 --- /dev/null +++ b/python/pytests/aggregation/count_if_test.py @@ -0,0 +1,62 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def count_if_source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n,is_valid", + "1996-12-19T16:39:57,A,5,10,true", + "1996-12-19T16:39:58,B,24,3,true", + "1996-12-19T16:39:59,A,17,6,false", + "1996-12-19T16:40:00,A,,9,false", + "1996-12-19T16:40:01,A,12,,true", + "1996-12-19T16:40:02,A,,,", + "1996-12-19T16:40:03,B,26,12,true", + "1996-12-19T16:40:04,B,30,1,true", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_count_if_unwindowed(count_if_source, golden) -> None: + is_valid = count_if_source.col("is_valid") + m = count_if_source.col("m") + golden.jsonl( + kd.record( + { + "is_valid": is_valid, + "count_if": is_valid.count_if(), + "m": m, + } + ) + ) + + +def test_count_if_windowed(count_if_source, golden) -> None: + is_valid = count_if_source.col("is_valid") + m = count_if_source.col("m") + golden.jsonl( + kd.record( + { + "is_valid": is_valid, + "count_if": is_valid.count_if(window=kd.windows.Since(m > 25)), + "m": m, + } + ) + ) + + +def test_count_if_since_true(count_if_source, golden) -> None: + is_valid = count_if_source.col("is_valid") + m = count_if_source.col("m") + golden.jsonl( + kd.record( + { + "is_valid": is_valid, + "count_if": is_valid.count_if(window=kd.windows.Since(True)), + "m": m, + } + ) + ) diff --git a/python/pytests/aggregation/count_test.py b/python/pytests/aggregation/count_test.py new file mode 100644 index 000000000..7131a5ca0 --- /dev/null +++ b/python/pytests/aggregation/count_test.py @@ -0,0 +1,52 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_count_unwindowed(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl( + kd.record({"m": m, "count_m": m.count(), "n": n, "count_n": n.count()}) + ) + + +def test_count_windowed(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl( + kd.record( + { + "m": m, + "count_m": m.count(window=kd.windows.Since(m > 20)), + "n": n, + "count_n": n.count(window=kd.windows.Sliding(2, m > 10)), + } + ) + ) + + +def test_count_since_true(source, golden) -> None: + # `since(True)` should be the same as unwindowed, so equals to one whenever the value is non-null + m_sum_since_true = kd.record( + { + "m": source.col("m"), + "m_count": source.col("m").count(window=kd.windows.Since(True)), + } + ) + golden.jsonl(m_sum_since_true) diff --git a/python/pytests/aggregation/max_test.py b/python/pytests/aggregation/max_test.py new file mode 100644 index 000000000..bcf668757 --- /dev/null +++ b/python/pytests/aggregation/max_test.py @@ -0,0 +1,50 @@ +import kaskada as kd +import pytest + + 
+@pytest.fixture(scope="module")
+def source() -> kd.sources.CsvString:
+    content = "\n".join(
+        [
+            "time,key,m,n",
+            "1996-12-19T16:39:57,A,5,10",
+            "1996-12-19T16:39:58,B,24,3",
+            "1996-12-19T16:39:59,A,17,6",
+            "1996-12-19T16:40:00,A,,9",
+            "1996-12-19T16:40:01,A,12,",
+            "1996-12-19T16:40:02,A,,",
+        ]
+    )
+    return kd.sources.CsvString(content, time_column_name="time", key_column_name="key")
+
+
+def test_max_unwindowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(kd.record({"m": m, "max_m": m.max(), "n": n, "max_n": n.max()}))
+
+
+def test_max_windowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(
+        kd.record(
+            {
+                "m": m,
+                "max_m": m.max(window=kd.windows.Since(m > 20)),
+                "n": n,
+                "max_n": n.max(window=kd.windows.Sliding(2, m > 10)),
+            }
+        )
+    )
+
+
+def test_max_since_true(source, golden) -> None:
+    # `since(True)` should be the same as unwindowed, so equals the original value.
+    m_max_since_true = kd.record(
+        {
+            "m": source.col("m"),
+            "m_max": source.col("m").max(window=kd.windows.Since(True)),
+        }
+    )
+    golden.jsonl(m_max_since_true)
diff --git a/python/pytests/aggregation/mean_test.py b/python/pytests/aggregation/mean_test.py
new file mode 100644
index 000000000..688c35963
--- /dev/null
+++ b/python/pytests/aggregation/mean_test.py
@@ -0,0 +1,50 @@
+import kaskada as kd
+import pytest
+
+
+@pytest.fixture(scope="module")
+def source() -> kd.sources.CsvString:
+    content = "\n".join(
+        [
+            "time,key,m,n",
+            "1996-12-19T16:39:57,A,5,10",
+            "1996-12-19T16:39:58,B,24,3",
+            "1996-12-19T16:39:59,A,17,6",
+            "1996-12-19T16:40:00,A,,9",
+            "1996-12-19T16:40:01,A,12,",
+            "1996-12-19T16:40:02,A,,",
+        ]
+    )
+    return kd.sources.CsvString(content, time_column_name="time", key_column_name="key")
+
+
+def test_mean_unwindowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(kd.record({"m": m, "mean_m": m.mean(), "n": n, "mean_n": n.mean()}))
+
+
+def test_mean_windowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(
+        kd.record(
+            {
+                "m": m,
+                "mean_m": m.mean(window=kd.windows.Since(m > 20)),
+                "n": n,
+                "mean_n": n.mean(window=kd.windows.Sliding(2, m > 10)),
+            }
+        )
+    )
+
+
+def test_mean_since_true(source, golden) -> None:
+    # `since(True)` should be the same as unwindowed, so equals the original value.
+    m_mean_since_true = kd.record(
+        {
+            "m": source.col("m"),
+            "m_mean": source.col("m").mean(window=kd.windows.Since(True)),
+        }
+    )
+    golden.jsonl(m_mean_since_true)
diff --git a/python/pytests/aggregation/min_test.py b/python/pytests/aggregation/min_test.py
new file mode 100644
index 000000000..57be7dbe0
--- /dev/null
+++ b/python/pytests/aggregation/min_test.py
@@ -0,0 +1,50 @@
+import kaskada as kd
+import pytest
+
+
+@pytest.fixture(scope="module")
+def source() -> kd.sources.CsvString:
+    content = "\n".join(
+        [
+            "time,key,m,n",
+            "1996-12-19T16:39:57,A,5,10",
+            "1996-12-19T16:39:58,B,24,3",
+            "1996-12-19T16:39:59,A,17,6",
+            "1996-12-19T16:40:00,A,,9",
+            "1996-12-19T16:40:01,A,12,",
+            "1996-12-19T16:40:02,A,,",
+        ]
+    )
+    return kd.sources.CsvString(content, time_column_name="time", key_column_name="key")
+
+
+def test_min_unwindowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(kd.record({"m": m, "min_m": m.min(), "n": n, "min_n": n.min()}))
+
+
+def test_min_windowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(
+        kd.record(
+            {
+                "m": m,
+                "min_m": m.min(window=kd.windows.Since(m > 20)),
+                "n": n,
+                "min_n": n.min(window=kd.windows.Sliding(2, m > 10)),
+            }
+        )
+    )
+
+
+def test_min_since_true(source, golden) -> None:
+    # `since(True)` should be the same as unwindowed, so equals the original value.
+    m_min_since_true = kd.record(
+        {
+            "m": source.col("m"),
+            "m_min": source.col("m").min(window=kd.windows.Since(True)),
+        }
+    )
+    golden.jsonl(m_min_since_true)
diff --git a/python/pytests/aggregation/stddev_test.py b/python/pytests/aggregation/stddev_test.py
new file mode 100644
index 000000000..eb473000c
--- /dev/null
+++ b/python/pytests/aggregation/stddev_test.py
@@ -0,0 +1,52 @@
+import kaskada as kd
+import pytest
+
+
+@pytest.fixture(scope="module")
+def source() -> kd.sources.CsvString:
+    content = "\n".join(
+        [
+            "time,key,m,n",
+            "1996-12-19T16:39:57,A,5,10",
+            "1996-12-19T16:39:58,B,24,3",
+            "1996-12-19T16:39:59,A,17,6",
+            "1996-12-19T16:40:00,A,,9",
+            "1996-12-19T16:40:01,A,12,",
+            "1996-12-19T16:40:02,A,,",
+        ]
+    )
+    return kd.sources.CsvString(content, time_column_name="time", key_column_name="key")
+
+
+def test_stddev_unwindowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(
+        kd.record({"m": m, "stddev_m": m.stddev(), "n": n, "stddev_n": n.stddev()})
+    )
+
+
+def test_stddev_windowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(
+        kd.record(
+            {
+                "m": m,
+                "stddev_m": m.stddev(window=kd.windows.Since(m > 20)),
+                "n": n,
+                "stddev_n": n.stddev(window=kd.windows.Sliding(2, m > 10)),
+            }
+        )
+    )
+
+
+def test_stddev_since_true(source, golden) -> None:
+    # `since(True)` should be the same as unwindowed, so equals the original value.
+    m_stddev_since_true = kd.record(
+        {
+            "m": source.col("m"),
+            "m_stddev": source.col("m").stddev(window=kd.windows.Since(True)),
+        }
+    )
+    golden.jsonl(m_stddev_since_true)
diff --git a/python/pytests/aggregation/sum_test.py b/python/pytests/aggregation/sum_test.py
new file mode 100644
index 000000000..8cac84194
--- /dev/null
+++ b/python/pytests/aggregation/sum_test.py
@@ -0,0 +1,50 @@
+import kaskada as kd
+import pytest
+
+
+@pytest.fixture(scope="module")
+def source() -> kd.sources.CsvString:
+    content = "\n".join(
+        [
+            "time,key,m,n",
+            "1996-12-19T16:39:57,A,5,10",
+            "1996-12-19T16:39:58,B,24,3",
+            "1996-12-19T16:39:59,A,17,6",
+            "1996-12-19T16:40:00,A,,9",
+            "1996-12-19T16:40:01,A,12,",
+            "1996-12-19T16:40:02,A,,",
+        ]
+    )
+    return kd.sources.CsvString(content, time_column_name="time", key_column_name="key")
+
+
+def test_sum_unwindowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(kd.record({"m": m, "sum_m": m.sum(), "n": n, "sum_n": n.sum()}))
+
+
+def test_sum_windowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(
+        kd.record(
+            {
+                "m": m,
+                "sum_m": m.sum(window=kd.windows.Since(m > 20)),
+                "n": n,
+                "sum_n": n.sum(window=kd.windows.Sliding(2, m > 10)),
+            }
+        )
+    )
+
+
+def test_sum_since_true(source, golden) -> None:
+    # `since(True)` should be the same as unwindowed, so it equals the original value.
+    m_sum_since_true = kd.record(
+        {
+            "m": source.col("m"),
+            "m_sum": source.col("m").sum(window=kd.windows.Since(True)),
+        }
+    )
+    golden.jsonl(m_sum_since_true)
diff --git a/python/pytests/aggregation/variance_test.py b/python/pytests/aggregation/variance_test.py
new file mode 100644
index 000000000..3fa28e66f
--- /dev/null
+++ b/python/pytests/aggregation/variance_test.py
@@ -0,0 +1,54 @@
+import kaskada as kd
+import pytest
+
+
+@pytest.fixture(scope="module")
+def source() -> kd.sources.CsvString:
+    content = "\n".join(
+        [
+            "time,key,m,n",
+            "1996-12-19T16:39:57,A,5,10",
+            "1996-12-19T16:39:58,B,24,3",
+            "1996-12-19T16:39:59,A,17,6",
+            "1996-12-19T16:40:00,A,,9",
+            "1996-12-19T16:40:01,A,12,",
+            "1996-12-19T16:40:02,A,,",
+        ]
+    )
+    return kd.sources.CsvString(content, time_column_name="time", key_column_name="key")
+
+
+def test_variance_unwindowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(
+        kd.record(
+            {"m": m, "variance_m": m.variance(), "n": n, "variance_n": n.variance()}
+        )
+    )
+
+
+def test_variance_windowed(source, golden) -> None:
+    m = source.col("m")
+    n = source.col("n")
+    golden.jsonl(
+        kd.record(
+            {
+                "m": m,
+                "variance_m": m.variance(window=kd.windows.Since(m > 20)),
+                "n": n,
+                "variance_n": n.variance(window=kd.windows.Sliding(2, m > 10)),
+            }
+        )
+    )
+
+
+def test_variance_since_true(source, golden) -> None:
+    # `since(True)` should be the same as unwindowed, so it equals the original value.
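+    # Hedged sanity relation: variance is the square of stddev, so (assuming both
+    # aggregations use the same estimator -- not verified here) the values in this
+    # golden file should match the squares of the corresponding rows produced by
+    # stddev_test.py.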
+ m_variance_since_true = kd.record( + { + "m": source.col("m"), + "m_variance": source.col("m").variance(window=kd.windows.Since(True)), + } + ) + golden.jsonl(m_variance_since_true) diff --git a/python/pytests/collect_test.py b/python/pytests/collect_test.py new file mode 100644 index 000000000..1a6c85bf8 --- /dev/null +++ b/python/pytests/collect_test.py @@ -0,0 +1,349 @@ +from datetime import timedelta + +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n,s,b", + "1996-12-19T16:39:57,A,5,10,a,true", + "1996-12-19T16:39:58,B,24,3,b,true", + "1996-12-19T16:39:59,A,17,6,b,", + "1996-12-19T16:40:00,A,,9,,false", + "1996-12-19T16:40:01,A,12,,e,false", + "1996-12-19T16:40:02,A,,,f,true", + "1996-12-19T16:40:04,A,,,f,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_collect_basic(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl( + kd.record( + { + "m": m, + "collect_m": m.collect(max=None), + "n": n, + "collect_n": n.collect(max=None), + } + ) + ) + + +def test_collect_with_max(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl( + kd.record( + { + "m": m, + "collect_m_max_2": m.collect(max=2), + "n": n, + "collect_n_max_2": n.collect(max=2), + } + ) + ) + + +def test_collect_with_min(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl( + kd.record( + { + "m": m, + "collect_m_min_2": m.collect(min=2, max=None), + "n": n, + "collect_n_min_2": n.collect(min=2, max=None), + } + ) + ) + + +def test_collect_with_min_and_max(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl( + kd.record( + { + "m": m, + "collect_m_min_2_max_2": m.collect(min=2, max=2), + "n": n, + "collect_n_min_2_max_2": n.collect(min=2, max=2), + } + ) + ) + + +def test_collect_since_window(source, golden) -> None: + m = source.col("m") + golden.jsonl( + kd.record( + {"m": m, "since_m": m.collect(max=None, window=kd.windows.Since(m > 10))} + ) + ) + + +def test_collect_i64_trailing_window_1s(source, golden) -> None: + m = source.col("m") + golden.jsonl( + kd.record( + { + "m": m, + "collect_m": m.collect( + max=None, window=kd.windows.Trailing(timedelta(seconds=1)) + ), + } + ) + ) + + +def test_collect_i64_trailing_window_3s(source, golden) -> None: + m = source.col("m") + golden.jsonl( + kd.record( + { + "m": m, + "collect_m": m.collect( + max=None, window=kd.windows.Trailing(timedelta(seconds=3)) + ), + } + ) + ) + + +def test_collect_i64_trailing_window_3s_with_max(source, golden) -> None: + m = source.col("m") + golden.jsonl( + kd.record( + { + "m": m, + "collect_m": m.collect( + max=2, window=kd.windows.Trailing(timedelta(seconds=3)) + ), + } + ) + ) + + +def test_collect_i64_trailing_window_3s_with_min(source, golden) -> None: + m = source.col("m") + golden.jsonl( + kd.record( + { + "m": m, + "collect_m": m.collect( + min=3, max=None, window=kd.windows.Trailing(timedelta(seconds=3)) + ), + } + ) + ) + + +def test_collect_string_trailing_window_1s(source, golden) -> None: + s = source.col("s") + golden.jsonl( + kd.record( + { + "m": s, + "collect_s": s.collect( + max=None, window=kd.windows.Trailing(timedelta(seconds=1)) + ), + } + ) + ) + + +def test_collect_string_trailing_window_3s(source, golden) -> None: + s = source.col("s") + golden.jsonl( + kd.record( + { + "s": s, + "collect_s": s.collect( + max=None, 
window=kd.windows.Trailing(timedelta(seconds=3)) + ), + } + ) + ) + + +def test_collect_string_trailing_window_3s_with_max(source, golden) -> None: + s = source.col("s") + golden.jsonl( + kd.record( + { + "s": s, + "collect_s": s.collect( + max=2, window=kd.windows.Trailing(timedelta(seconds=3)) + ), + } + ) + ) + + +def test_collect_string_trailing_window_3s_with_min(source, golden) -> None: + s = source.col("s") + golden.jsonl( + kd.record( + { + "s": s, + "collect_s": s.collect( + min=3, max=None, window=kd.windows.Trailing(timedelta(seconds=3)) + ), + } + ) + ) + + +def test_collect_bool_trailing_window_1s(source, golden) -> None: + b = source.col("b") + golden.jsonl( + kd.record( + { + "b": b, + "collect_b": b.collect( + max=None, window=kd.windows.Trailing(timedelta(seconds=1)) + ), + } + ) + ) + + +def test_collect_bool_trailing_window_3s(source, golden) -> None: + b = source.col("b") + golden.jsonl( + kd.record( + { + "b": b, + "collect_b": b.collect( + max=None, window=kd.windows.Trailing(timedelta(seconds=3)) + ), + } + ) + ) + + +def test_collect_bool_trailing_window_3s_with_max(source, golden) -> None: + b = source.col("b") + golden.jsonl( + kd.record( + { + "b": b, + "collect_b": b.collect( + max=2, window=kd.windows.Trailing(timedelta(seconds=3)) + ), + } + ) + ) + + +def test_collect_bool_trailing_window_3s_with_min(source, golden) -> None: + b = source.col("b") + golden.jsonl( + kd.record( + { + "b": b, + "collect_b": b.collect( + min=3, max=None, window=kd.windows.Trailing(timedelta(seconds=3)) + ), + } + ) + ) + + +# Currently, the Pandas comparison method being used doesn't handle +# date-time like fields nested within a list. So we expand things out. +# +# TODO: Improve the golden testing so this isn't necessary. +def test_collect_struct_trailing_window_1s(source, golden) -> None: + collect = source.collect(max=None, window=kd.windows.Trailing(timedelta(seconds=1))) + golden.jsonl( + kd.record( + { + "f0": collect[0].col("time"), + "f1": collect[1].col("time"), + "f2": collect[2].col("time"), + "f3": collect[3].col("time"), + "f4": collect[4].col("time"), + } + ) + ) + + +def test_collect_struct_trailing_window_3s(source, golden) -> None: + collect = source.collect(max=None, window=kd.windows.Trailing(timedelta(seconds=3))) + golden.jsonl( + kd.record( + { + "f0": collect[0].col("time"), + "f1": collect[1].col("time"), + "f2": collect[2].col("time"), + "f3": collect[3].col("time"), + "f4": collect[4].col("time"), + } + ) + ) + + +def test_collect_struct_trailing_window_3s_with_max(source, golden) -> None: + collect = source.collect(max=2, window=kd.windows.Trailing(timedelta(seconds=3))) + golden.jsonl( + kd.record( + { + "f0": collect[0].col("time"), + "f1": collect[1].col("time"), + "f2": collect[2].col("time"), + "f3": collect[3].col("time"), + "f4": collect[4].col("time"), + } + ) + ) + + +def test_collect_struct_trailing_window_3s_with_min(source, golden) -> None: + collect = source.collect( + min=3, max=None, window=kd.windows.Trailing(timedelta(seconds=3)) + ) + golden.jsonl( + kd.record( + { + "f0": collect[0].col("time"), + "f1": collect[1].col("time"), + "f2": collect[2].col("time"), + "f3": collect[3].col("time"), + "f4": collect[4].col("time"), + } + ) + ) + + +def test_collect_records(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl(kd.record({"m": m, "n": n}).collect(max=None)) + + +def test_collect_records_field_ref(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl(kd.record({"m": m, 
"n": n}).collect(max=None).col("m")) + + +def test_collect_lists(source, golden) -> None: + m = source.col("m") + golden.jsonl( + kd.record( + { + "m": m, + "list_m": m.collect(max=10), + "collect_list": m.collect(max=10).collect(max=10), + } + ) + ) diff --git a/python/pytests/conftest.py b/python/pytests/conftest.py new file mode 100644 index 000000000..d32078432 --- /dev/null +++ b/python/pytests/conftest.py @@ -0,0 +1,118 @@ +import os +from typing import Union + +import kaskada as kd +import pandas as pd +import pyarrow as pa +import pytest +from kaskada import init_session + + +@pytest.fixture(autouse=True, scope="session") +def session() -> None: + init_session() + + +def pytest_addoption(parser: pytest.Parser): + parser.addoption("--save-golden", action="store_true", help="update golden files") + + +class GoldenFixture(object): + def __init__(self, dirname: str, test_name: str, save: bool): + self._output = 0 + self._dirname = dirname + self._test_name = test_name + self._save = save + + def jsonl(self, data: Union[kd.Timestream, pd.DataFrame]) -> None: + """Golden test against newline-delimited JSON file (json-lines).""" + df = _data_to_dataframe(data) + filename = self._filename("jsonl") + + if self._save: + df.to_json( + filename, + orient="records", + lines=True, + date_format="iso", + date_unit="ns", + ) + + golden = pd.read_json( + filename, + orient="records", + lines=True, + dtype=df.dtypes.to_dict(), + date_unit="ns", + ) + + pd.testing.assert_frame_equal(df, golden, check_datetimelike_compat=True) + + def parquet(self, data: Union[kd.Timestream, pd.DataFrame]) -> None: + """Golden test against Parquet file.""" + df = _data_to_dataframe(data) + filename = self._filename("parquet") + + if self._save: + df.to_parquet(filename) + + golden = pd.read_parquet(filename) + + pd.testing.assert_frame_equal(df, golden) + + def _filename(self, suffix: str) -> str: + filename = ( + f"{self._test_name}.{suffix}" + if self._output == 0 + else f"{self._test_name}_{self._output}.{suffix}" + ) + filename = os.path.join(self._dirname, filename) + self._output += 1 + + if not self._save: + assert os.path.exists( + filename + ), f"Golden file {filename} does not exist. Run with `--save-golden` to create it." + return filename + + +def _data_to_dataframe(data: Union[kd.Timestream, pd.DataFrame]) -> pd.DataFrame: + if isinstance(data, pd.DataFrame): + return data + elif isinstance(data, kd.Timestream): + return data.run().to_pandas() + else: + raise ValueError(f"data must be a Timestream or a DataFrame, was {type(data)}") + + +def _data_to_pyarrow( + data: Union[kd.Timestream, pa.RecordBatch, pa.Table] +) -> Union[pa.RecordBatch, pa.Table]: + if isinstance(data, kd.Timestream): + return data.run().to_pyarrow() + elif isinstance(data, pa.RecordBatch) or isinstance(data, pa.Table): + return data + else: + raise ValueError( + f"data must be a Timestream, RecordBatch, or Table, was {type(data)}" + ) + + +@pytest.fixture +def golden( + request: pytest.FixtureRequest, pytestconfig: pytest.Config +) -> GoldenFixture: + """Test fixture for checking results against a golden file.""" + test_name = request.node.name + module_name = request.node.module.__name__ + dirname = os.path.join("pytests", "golden", module_name) + + save = pytestconfig.getoption("--save-golden", default=False) + if save: + os.makedirs(dirname, exist_ok=True) + else: + assert os.path.isdir( + dirname + ), f"golden directory {dirname} does not exist. run with `--save-golden` to create it." 
+ + return GoldenFixture(dirname, test_name, save) diff --git a/python/pytests/csv_string_source_test.py b/python/pytests/csv_string_source_test.py new file mode 100644 index 000000000..777ae1f31 --- /dev/null +++ b/python/pytests/csv_string_source_test.py @@ -0,0 +1,35 @@ +import kaskada as kd + + +def test_read_csv(golden) -> None: + content1 = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + content2 = "\n".join( + [ + "time,key,m,n", + "1996-12-19T17:39:57,A,5,10", + "1996-12-19T17:39:58,B,24,3", + "1996-12-19T17:39:59,A,17,6", + "1996-12-19T17:40:00,A,,9", + "1996-12-19T17:40:01,A,12,", + "1996-12-19T17:40:02,A,,", + ] + ) + source = kd.sources.CsvString( + content1, + time_column_name="time", + key_column_name="key", + ) + golden.jsonl(source) + + source.add_string(content2) + golden.jsonl(source) diff --git a/python/pytests/else_test.py b/python/pytests/else_test.py new file mode 100644 index 000000000..04b1388b4 --- /dev/null +++ b/python/pytests/else_test.py @@ -0,0 +1,65 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n,default", + "1996-12-19T16:39:57,A,5,10,-1", + "1996-12-19T16:39:58,B,24,3,-1", + "1996-12-19T16:39:59,A,17,6,-1", + "1996-12-19T16:40:00,A,,9,-1", + "1996-12-19T16:40:01,A,12,,-1", + "1996-12-19T16:40:02,A,,,-1", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_else_(source, golden) -> None: + m = source.col("m") + n = source.col("n") + default = source.col("default") + condition_m = m > 15 + condition_n = n > 5 + golden.jsonl( + kd.record( + { + "m": m, + "condition_m": condition_m, + "if_else_m": m.if_(condition_m).else_(default), + "n": n, + "condition_n": condition_n, + "if_n": n.if_(condition_n).else_(default), + } + ) + ) + + +@pytest.fixture(scope="module") +def record_source() -> kd.sources.JsonlString: + content = "\n".join( + [ + """{"time":"1996-12-19T16:39:57","key":"A","override": {"test":"override_val"}}""", + """{"time":"1996-12-19T16:39:58","key":"A","default_record":{"test":"default"}}""", + ] + ) + return kd.sources.JsonlString( + content, time_column_name="time", key_column_name="key" + ) + + +def test_else_debug(record_source, golden) -> None: + default_record = record_source.col("default_record") + override_column = record_source.col("override") + golden.jsonl( + kd.record( + { + "default_record": default_record, + "overide": override_column, + "override_else_default": override_column.else_(default_record), + } + ) + ) diff --git a/python/pytests/filter_test.py b/python/pytests/filter_test.py new file mode 100644 index 000000000..30144c779 --- /dev/null +++ b/python/pytests/filter_test.py @@ -0,0 +1,37 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_filter(source, golden) -> None: + m = source.col("m") + n = source.col("n") + condition_m = m > 15 + condition_n = n > 5 + golden.jsonl( + kd.record( + { 
+ "m": m, + "condition_m": condition_m, + "filter_m": m.filter(condition_m), + "n": n, + "condition_n": condition_n, + "filter_n": n.filter(condition_n), + } + ) + ) diff --git a/python/pytests/flatten_test.py b/python/pytests/flatten_test.py new file mode 100644 index 000000000..bdf5cc7dd --- /dev/null +++ b/python/pytests/flatten_test.py @@ -0,0 +1,17 @@ +import kaskada as kd + + +def test_flatten(golden) -> None: + source = kd.sources.PyList( + [ + {"time": "1996-12-19T16:39:57", "user": "A", "m": [[5]]}, + {"time": "1996-12-19T17:39:57", "user": "A", "m": []}, + {"time": "1996-12-19T18:39:57", "user": "A", "m": [None]}, + {"time": "1996-12-19T19:39:57", "user": "A", "m": [[6], [7]]}, + {"time": "1996-12-19T19:39:57", "user": "A", "m": [[7, 8], [9, 10]]}, + ], + time_column_name="time", + key_column_name="user", + ) + + golden.jsonl(source.col("m").flatten()) diff --git a/python/pytests/golden/collect_test/test_collect_basic.jsonl b/python/pytests/golden/collect_test/test_collect_basic.jsonl new file mode 100644 index 000000000..71e734759 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_basic.jsonl @@ -0,0 +1,7 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"collect_m":[5],"n":10.0,"collect_n":[10]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"collect_m":[24],"n":3.0,"collect_n":[3]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"collect_m":[5,17],"n":6.0,"collect_n":[10,6]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m":[5,17],"n":9.0,"collect_n":[10,6,9]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"collect_m":[5,17,12],"n":null,"collect_n":[10,6,9]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m":[5,17,12],"n":null,"collect_n":[10,6,9]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m":[5,17,12],"n":null,"collect_n":[10,6,9]} diff --git a/python/pytests/golden/collect_test/test_collect_bool_trailing_window_1s.jsonl b/python/pytests/golden/collect_test/test_collect_bool_trailing_window_1s.jsonl new file mode 100644 index 000000000..2a0536f9c --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_bool_trailing_window_1s.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","b":true,"collect_b":[true]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","b":true,"collect_b":[true]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"B","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","b":false,"collect_b":[false]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","b":false,"collect_b":[false]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","b":true,"collect_b":[true]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","b":null,"collect_b":[]} diff --git a/python/pytests/golden/collect_test/test_collect_bool_trailing_window_3s.jsonl b/python/pytests/golden/collect_test/test_collect_bool_trailing_window_3s.jsonl new 
file mode 100644 index 000000000..e67cc0219 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_bool_trailing_window_3s.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","b":true,"collect_b":[true]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","b":true,"collect_b":[true]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","b":null,"collect_b":[true]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","b":false,"collect_b":[false]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","b":false,"collect_b":[false,false]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","b":null,"collect_b":[false,false]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","b":true,"collect_b":[false,false,true]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","b":null,"collect_b":[false,true]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","b":null,"collect_b":[true]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","b":null,"collect_b":[true]} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","b":null,"collect_b":[]} diff --git a/python/pytests/golden/collect_test/test_collect_bool_trailing_window_3s_with_max.jsonl b/python/pytests/golden/collect_test/test_collect_bool_trailing_window_3s_with_max.jsonl new file mode 100644 index 000000000..1f30807dc --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_bool_trailing_window_3s_with_max.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","b":true,"collect_b":[true]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","b":true,"collect_b":[true]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","b":null,"collect_b":[true]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","b":false,"collect_b":[false]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","b":false,"collect_b":[false,false]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","b":null,"collect_b":[false,false]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","b":true,"collect_b":[false,true]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","b":null,"collect_b":[false,true]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","b":null,"collect_b":[true]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","b":null,"collect_b":[true]} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","b":null,"collect_b":[]} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","b":null,"collect_b":[]} diff --git a/python/pytests/golden/collect_test/test_collect_bool_trailing_window_3s_with_min.jsonl b/python/pytests/golden/collect_test/test_collect_bool_trailing_window_3s_with_min.jsonl new file mode 100644 index 000000000..42ac39085 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_bool_trailing_window_3s_with_min.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","b":true,"collect_b":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","b":true,"collect_b":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","b":null,"collect_b":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","b":null,"collect_b":null} 
+{"_time":"1996-12-19T16:40:00.000000000","_key":"A","b":false,"collect_b":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","b":null,"collect_b":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","b":false,"collect_b":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","b":null,"collect_b":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","b":true,"collect_b":[false,false,true]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","b":null,"collect_b":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","b":null,"collect_b":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","b":null,"collect_b":null} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","b":null,"collect_b":null} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","b":null,"collect_b":null} diff --git a/python/pytests/golden/collect_test/test_collect_i64_trailing_window_1s.jsonl b/python/pytests/golden/collect_test/test_collect_i64_trailing_window_1s.jsonl new file mode 100644 index 000000000..80fce4273 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_i64_trailing_window_1s.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"collect_m":[5]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"collect_m":[24]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"B","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"collect_m":[17]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"collect_m":[12]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","m":null,"collect_m":[]} diff --git a/python/pytests/golden/collect_test/test_collect_i64_trailing_window_3s.jsonl b/python/pytests/golden/collect_test/test_collect_i64_trailing_window_3s.jsonl new file mode 100644 index 000000000..8c5bfc3e4 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_i64_trailing_window_3s.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"collect_m":[5]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"collect_m":[24]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"collect_m":[5,17]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m":[17]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m":[17]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"collect_m":[17,12]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m":[12]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m":[12]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","m":null,"collect_m":[12]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m":[]} 
+{"_time":"1996-12-19T16:40:05.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","m":null,"collect_m":[]} diff --git a/python/pytests/golden/collect_test/test_collect_i64_trailing_window_3s_with_max.jsonl b/python/pytests/golden/collect_test/test_collect_i64_trailing_window_3s_with_max.jsonl new file mode 100644 index 000000000..8c5bfc3e4 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_i64_trailing_window_3s_with_max.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"collect_m":[5]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"collect_m":[24]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"collect_m":[5,17]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m":[17]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m":[17]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"collect_m":[17,12]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m":[12]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m":[12]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","m":null,"collect_m":[12]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","m":null,"collect_m":[]} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","m":null,"collect_m":[]} diff --git a/python/pytests/golden/collect_test/test_collect_i64_trailing_window_3s_with_min.jsonl b/python/pytests/golden/collect_test/test_collect_i64_trailing_window_3s_with_min.jsonl new file mode 100644 index 000000000..efad0a0a5 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_i64_trailing_window_3s_with_min.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"collect_m":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"collect_m":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"collect_m":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","m":null,"collect_m":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"collect_m":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m":null} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","m":null,"collect_m":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m":null} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","m":null,"collect_m":null} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","m":null,"collect_m":null} diff --git a/python/pytests/golden/collect_test/test_collect_lists.jsonl b/python/pytests/golden/collect_test/test_collect_lists.jsonl new file mode 100644 index 000000000..20dabdc07 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_lists.jsonl @@ -0,0 +1,7 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"list_m":[5],"collect_list":[[5]]} 
+{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"list_m":[24],"collect_list":[[24]]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"list_m":[5,17],"collect_list":[[5],[5,17]]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"list_m":[5,17],"collect_list":[[5],[5,17],[5,17]]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"list_m":[5,17,12],"collect_list":[[5],[5,17],[5,17],[5,17,12]]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"list_m":[5,17,12],"collect_list":[[5],[5,17],[5,17],[5,17,12],[5,17,12]]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"list_m":[5,17,12],"collect_list":[[5],[5,17],[5,17],[5,17,12],[5,17,12],[5,17,12]]} diff --git a/python/pytests/golden/collect_test/test_collect_records.jsonl b/python/pytests/golden/collect_test/test_collect_records.jsonl new file mode 100644 index 000000000..f248c7823 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_records.jsonl @@ -0,0 +1,7 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","result":[{"m":5.0,"n":10.0}]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","result":[{"m":24.0,"n":3.0}]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","result":[{"m":5.0,"n":10.0},{"m":17.0,"n":6.0}]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","result":[{"m":5.0,"n":10.0},{"m":17.0,"n":6.0},{"m":null,"n":9.0}]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","result":[{"m":5.0,"n":10.0},{"m":17.0,"n":6.0},{"m":null,"n":9.0},{"m":12.0,"n":null}]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","result":[{"m":5.0,"n":10.0},{"m":17.0,"n":6.0},{"m":null,"n":9.0},{"m":12.0,"n":null},{"m":null,"n":null}]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","result":[{"m":5.0,"n":10.0},{"m":17.0,"n":6.0},{"m":null,"n":9.0},{"m":12.0,"n":null},{"m":null,"n":null},{"m":null,"n":null}]} diff --git a/python/pytests/golden/collect_test/test_collect_records_field_ref.jsonl b/python/pytests/golden/collect_test/test_collect_records_field_ref.jsonl new file mode 100644 index 000000000..69763270d --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_records_field_ref.jsonl @@ -0,0 +1,7 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","result":[5.0]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","result":[24.0]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","result":[5.0,17.0]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","result":[5.0,17.0,null]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","result":[5.0,17.0,null,12.0]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","result":[5.0,17.0,null,12.0,null]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","result":[5.0,17.0,null,12.0,null,null]} diff --git a/python/pytests/golden/collect_test/test_collect_since_window.jsonl b/python/pytests/golden/collect_test/test_collect_since_window.jsonl new file mode 100644 index 000000000..a0153e2d0 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_since_window.jsonl @@ -0,0 +1,7 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"since_m":[5]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"since_m":[24]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"since_m":[5,17]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"since_m":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"since_m":[12]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"since_m":[]} 
+{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"since_m":[]} diff --git a/python/pytests/golden/collect_test/test_collect_string_trailing_window_1s.jsonl b/python/pytests/golden/collect_test/test_collect_string_trailing_window_1s.jsonl new file mode 100644 index 000000000..05ed323b6 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_string_trailing_window_1s.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":"a","collect_s":["a"]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"A","m":null,"collect_s":[]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":"b","collect_s":["b"]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"B","m":null,"collect_s":[]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":"b","collect_s":["b"]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_s":[]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_s":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":null,"collect_s":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":"e","collect_s":["e"]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_s":[]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":"f","collect_s":["f"]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","m":null,"collect_s":[]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":"f","collect_s":["f"]} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","m":null,"collect_s":[]} diff --git a/python/pytests/golden/collect_test/test_collect_string_trailing_window_3s.jsonl b/python/pytests/golden/collect_test/test_collect_string_trailing_window_3s.jsonl new file mode 100644 index 000000000..1b41d2c83 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_string_trailing_window_3s.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","s":"a","collect_s":["a"]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","s":"b","collect_s":["b"]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","s":"b","collect_s":["a","b"]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","s":null,"collect_s":["b"]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","s":null,"collect_s":["b"]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","s":null,"collect_s":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","s":"e","collect_s":["b","e"]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","s":null,"collect_s":["e"]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","s":"f","collect_s":["e","f"]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","s":null,"collect_s":["e","f"]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","s":null,"collect_s":["f"]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","s":"f","collect_s":["f","f"]} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","s":null,"collect_s":["f"]} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","s":null,"collect_s":[]} diff --git a/python/pytests/golden/collect_test/test_collect_string_trailing_window_3s_with_max.jsonl b/python/pytests/golden/collect_test/test_collect_string_trailing_window_3s_with_max.jsonl new file mode 100644 index 000000000..1b41d2c83 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_string_trailing_window_3s_with_max.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","s":"a","collect_s":["a"]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","s":"b","collect_s":["b"]} 
+{"_time":"1996-12-19T16:39:59.000000000","_key":"A","s":"b","collect_s":["a","b"]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","s":null,"collect_s":["b"]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","s":null,"collect_s":["b"]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","s":null,"collect_s":[]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","s":"e","collect_s":["b","e"]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","s":null,"collect_s":["e"]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","s":"f","collect_s":["e","f"]} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","s":null,"collect_s":["e","f"]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","s":null,"collect_s":["f"]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","s":"f","collect_s":["f","f"]} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","s":null,"collect_s":["f"]} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","s":null,"collect_s":[]} diff --git a/python/pytests/golden/collect_test/test_collect_string_trailing_window_3s_with_min.jsonl b/python/pytests/golden/collect_test/test_collect_string_trailing_window_3s_with_min.jsonl new file mode 100644 index 000000000..32f5cb57a --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_string_trailing_window_3s_with_min.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","s":"a","collect_s":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","s":"b","collect_s":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","s":"b","collect_s":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","s":null,"collect_s":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","s":null,"collect_s":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","s":null,"collect_s":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","s":"e","collect_s":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","s":null,"collect_s":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","s":"f","collect_s":null} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","s":null,"collect_s":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","s":null,"collect_s":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","s":"f","collect_s":null} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","s":null,"collect_s":null} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","s":null,"collect_s":null} diff --git a/python/pytests/golden/collect_test/test_collect_struct_trailing_window_1s.jsonl b/python/pytests/golden/collect_test/test_collect_struct_trailing_window_1s.jsonl new file mode 100644 index 000000000..0722e2bb1 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_struct_trailing_window_1s.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","f0":"1996-12-19T16:39:57.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","f0":"1996-12-19T16:39:58.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"B","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","f0":"1996-12-19T16:39:59.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} 
+{"_time":"1996-12-19T16:40:00.000000000","_key":"A","f0":"1996-12-19T16:40:00.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","f0":"1996-12-19T16:40:01.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","f0":"1996-12-19T16:40:02.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","f0":"1996-12-19T16:40:04.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} diff --git a/python/pytests/golden/collect_test/test_collect_struct_trailing_window_3s.jsonl b/python/pytests/golden/collect_test/test_collect_struct_trailing_window_3s.jsonl new file mode 100644 index 000000000..e3b37c8ba --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_struct_trailing_window_3s.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","f0":"1996-12-19T16:39:57.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","f0":"1996-12-19T16:39:58.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","f0":"1996-12-19T16:39:57.000000000","f1":"1996-12-19T16:39:59.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","f0":"1996-12-19T16:39:59.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","f0":"1996-12-19T16:39:59.000000000","f1":"1996-12-19T16:40:00.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","f0":"1996-12-19T16:39:59.000000000","f1":"1996-12-19T16:40:00.000000000","f2":"1996-12-19T16:40:01.000000000","f3":null,"f4":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","f0":"1996-12-19T16:40:00.000000000","f1":"1996-12-19T16:40:01.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","f0":"1996-12-19T16:40:00.000000000","f1":"1996-12-19T16:40:01.000000000","f2":"1996-12-19T16:40:02.000000000","f3":null,"f4":null} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","f0":"1996-12-19T16:40:01.000000000","f1":"1996-12-19T16:40:02.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","f0":"1996-12-19T16:40:02.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","f0":"1996-12-19T16:40:02.000000000","f1":"1996-12-19T16:40:04.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","f0":"1996-12-19T16:40:04.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} diff --git a/python/pytests/golden/collect_test/test_collect_struct_trailing_window_3s_with_max.jsonl b/python/pytests/golden/collect_test/test_collect_struct_trailing_window_3s_with_max.jsonl new file mode 100644 index 000000000..5e8a46d63 --- 
/dev/null +++ b/python/pytests/golden/collect_test/test_collect_struct_trailing_window_3s_with_max.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","f0":"1996-12-19T16:39:57.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","f0":"1996-12-19T16:39:58.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","f0":"1996-12-19T16:39:57.000000000","f1":"1996-12-19T16:39:59.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","f0":"1996-12-19T16:39:59.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","f0":"1996-12-19T16:39:59.000000000","f1":"1996-12-19T16:40:00.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","f0":"1996-12-19T16:40:00.000000000","f1":"1996-12-19T16:40:01.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","f0":"1996-12-19T16:40:00.000000000","f1":"1996-12-19T16:40:01.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","f0":"1996-12-19T16:40:01.000000000","f1":"1996-12-19T16:40:02.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","f0":"1996-12-19T16:40:01.000000000","f1":"1996-12-19T16:40:02.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","f0":"1996-12-19T16:40:02.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","f0":"1996-12-19T16:40:02.000000000","f1":"1996-12-19T16:40:04.000000000","f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","f0":"1996-12-19T16:40:04.000000000","f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} diff --git a/python/pytests/golden/collect_test/test_collect_struct_trailing_window_3s_with_min.jsonl b/python/pytests/golden/collect_test/test_collect_struct_trailing_window_3s_with_min.jsonl new file mode 100644 index 000000000..7db5c802c --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_struct_trailing_window_3s_with_min.jsonl @@ -0,0 +1,14 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"B","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","f0":"1996-12-19T16:39:59.000000000","f1":"1996-12-19T16:40:00.000000000","f2":"1996-12-19T16:40:01.000000000","f3":null,"f4":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} 
+{"_time":"1996-12-19T16:40:02.000000000","_key":"A","f0":"1996-12-19T16:40:00.000000000","f1":"1996-12-19T16:40:01.000000000","f2":"1996-12-19T16:40:02.000000000","f3":null,"f4":null} +{"_time":"1996-12-19T16:40:03.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:05.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","f0":null,"f1":null,"f2":null,"f3":null,"f4":null} diff --git a/python/pytests/golden/collect_test/test_collect_with_max.jsonl b/python/pytests/golden/collect_test/test_collect_with_max.jsonl new file mode 100644 index 000000000..a5ff3086e --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_with_max.jsonl @@ -0,0 +1,7 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"collect_m_max_2":[5],"n":10.0,"collect_n_max_2":[10]} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"collect_m_max_2":[24],"n":3.0,"collect_n_max_2":[3]} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"collect_m_max_2":[5,17],"n":6.0,"collect_n_max_2":[10,6]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m_max_2":[5,17],"n":9.0,"collect_n_max_2":[6,9]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"collect_m_max_2":[17,12],"n":null,"collect_n_max_2":[6,9]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m_max_2":[17,12],"n":null,"collect_n_max_2":[6,9]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m_max_2":[17,12],"n":null,"collect_n_max_2":[6,9]} diff --git a/python/pytests/golden/collect_test/test_collect_with_min.jsonl b/python/pytests/golden/collect_test/test_collect_with_min.jsonl new file mode 100644 index 000000000..0c9acfa7f --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_with_min.jsonl @@ -0,0 +1,7 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"collect_m_min_2":null,"n":10.0,"collect_n_min_2":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"collect_m_min_2":null,"n":3.0,"collect_n_min_2":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"collect_m_min_2":[5,17],"n":6.0,"collect_n_min_2":[10,6]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m_min_2":[5,17],"n":9.0,"collect_n_min_2":[10,6,9]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"collect_m_min_2":[5,17,12],"n":null,"collect_n_min_2":[10,6,9]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m_min_2":[5,17,12],"n":null,"collect_n_min_2":[10,6,9]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m_min_2":[5,17,12],"n":null,"collect_n_min_2":[10,6,9]} diff --git a/python/pytests/golden/collect_test/test_collect_with_min_and_max.jsonl b/python/pytests/golden/collect_test/test_collect_with_min_and_max.jsonl new file mode 100644 index 000000000..ca9d14c72 --- /dev/null +++ b/python/pytests/golden/collect_test/test_collect_with_min_and_max.jsonl @@ -0,0 +1,7 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"collect_m_min_2_max_2":null,"n":10.0,"collect_n_min_2_max_2":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"collect_m_min_2_max_2":null,"n":3.0,"collect_n_min_2_max_2":null} 
+{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"collect_m_min_2_max_2":[5,17],"n":6.0,"collect_n_min_2_max_2":[10,6]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"collect_m_min_2_max_2":[5,17],"n":9.0,"collect_n_min_2_max_2":[6,9]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"collect_m_min_2_max_2":[17,12],"n":null,"collect_n_min_2_max_2":[6,9]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"collect_m_min_2_max_2":[17,12],"n":null,"collect_n_min_2_max_2":[6,9]} +{"_time":"1996-12-19T16:40:04.000000000","_key":"A","m":null,"collect_m_min_2_max_2":[17,12],"n":null,"collect_n_min_2_max_2":[6,9]} diff --git a/python/pytests/golden/count_if_test/test_count_if_since_true.jsonl b/python/pytests/golden/count_if_test/test_count_if_since_true.jsonl new file mode 100644 index 000000000..fb13339e6 --- /dev/null +++ b/python/pytests/golden/count_if_test/test_count_if_since_true.jsonl @@ -0,0 +1,8 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","is_valid":true,"count_if":1,"m":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","is_valid":true,"count_if":1,"m":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","is_valid":false,"count_if":0,"m":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","is_valid":false,"count_if":0,"m":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","is_valid":true,"count_if":1,"m":12.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","is_valid":null,"count_if":0,"m":null} +{"_time":"1996-12-19T16:40:03.000000000","_key":"B","is_valid":true,"count_if":1,"m":26.0} +{"_time":"1996-12-19T16:40:04.000000000","_key":"B","is_valid":true,"count_if":1,"m":30.0} diff --git a/python/pytests/golden/count_if_test/test_count_if_unwindowed.jsonl b/python/pytests/golden/count_if_test/test_count_if_unwindowed.jsonl new file mode 100644 index 000000000..5b9d5cbd1 --- /dev/null +++ b/python/pytests/golden/count_if_test/test_count_if_unwindowed.jsonl @@ -0,0 +1,8 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","is_valid":true,"count_if":1,"m":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","is_valid":true,"count_if":1,"m":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","is_valid":false,"count_if":1,"m":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","is_valid":false,"count_if":1,"m":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","is_valid":true,"count_if":2,"m":12.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","is_valid":null,"count_if":2,"m":null} +{"_time":"1996-12-19T16:40:03.000000000","_key":"B","is_valid":true,"count_if":2,"m":26.0} +{"_time":"1996-12-19T16:40:04.000000000","_key":"B","is_valid":true,"count_if":3,"m":30.0} diff --git a/python/pytests/golden/count_if_test/test_count_if_windowed.jsonl b/python/pytests/golden/count_if_test/test_count_if_windowed.jsonl new file mode 100644 index 000000000..7b8430105 --- /dev/null +++ b/python/pytests/golden/count_if_test/test_count_if_windowed.jsonl @@ -0,0 +1,8 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","is_valid":true,"count_if":1,"m":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","is_valid":true,"count_if":1,"m":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","is_valid":false,"count_if":1,"m":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","is_valid":false,"count_if":1,"m":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","is_valid":true,"count_if":2,"m":12.0} 
+{"_time":"1996-12-19T16:40:02.000000000","_key":"A","is_valid":null,"count_if":2,"m":null} +{"_time":"1996-12-19T16:40:03.000000000","_key":"B","is_valid":true,"count_if":2,"m":26.0} +{"_time":"1996-12-19T16:40:04.000000000","_key":"B","is_valid":true,"count_if":1,"m":30.0} diff --git a/python/pytests/golden/count_test/test_count_since_true.jsonl b/python/pytests/golden/count_test/test_count_since_true.jsonl new file mode 100644 index 000000000..9abfa57d3 --- /dev/null +++ b/python/pytests/golden/count_test/test_count_since_true.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"m_count":1} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"m_count":1} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"m_count":1} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"m_count":0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"m_count":1} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"m_count":0} diff --git a/python/pytests/golden/count_test/test_count_unwindowed.jsonl b/python/pytests/golden/count_test/test_count_unwindowed.jsonl new file mode 100644 index 000000000..001adfc5f --- /dev/null +++ b/python/pytests/golden/count_test/test_count_unwindowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"count_m":1,"n":10.0,"count_n":1} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"count_m":1,"n":3.0,"count_n":1} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"count_m":2,"n":6.0,"count_n":2} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"count_m":2,"n":9.0,"count_n":3} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"count_m":3,"n":null,"count_n":3} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"count_m":3,"n":null,"count_n":3} diff --git a/python/pytests/golden/count_test/test_count_windowed.jsonl b/python/pytests/golden/count_test/test_count_windowed.jsonl new file mode 100644 index 000000000..500248e66 --- /dev/null +++ b/python/pytests/golden/count_test/test_count_windowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"count_m":1,"n":10.0,"count_n":1} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"count_m":1,"n":3.0,"count_n":1} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"count_m":2,"n":6.0,"count_n":2} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"count_m":2,"n":9.0,"count_n":3} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"count_m":3,"n":null,"count_n":3} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"count_m":3,"n":null,"count_n":1} diff --git a/python/pytests/golden/csv_string_source_test/test_read_csv.jsonl b/python/pytests/golden/csv_string_source_test/test_read_csv.jsonl new file mode 100644 index 000000000..c1d8ce147 --- /dev/null +++ b/python/pytests/golden/csv_string_source_test/test_read_csv.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"n":6.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","time":"1996-12-19T16:40:00.000000000","key":"A","m":null,"n":9.0} 
+{"_time":"1996-12-19T16:40:01.000000000","_key":"A","time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","time":"1996-12-19T16:40:02.000000000","key":"A","m":null,"n":null} diff --git a/python/pytests/golden/csv_string_source_test/test_read_csv_1.jsonl b/python/pytests/golden/csv_string_source_test/test_read_csv_1.jsonl new file mode 100644 index 000000000..59c70427f --- /dev/null +++ b/python/pytests/golden/csv_string_source_test/test_read_csv_1.jsonl @@ -0,0 +1,12 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"n":6.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","time":"1996-12-19T16:40:00.000000000","key":"A","m":null,"n":9.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","time":"1996-12-19T16:40:02.000000000","key":"A","m":null,"n":null} +{"_time":"1996-12-19T17:39:57.000000000","_key":"A","time":"1996-12-19T17:39:57.000000000","key":"A","m":5.0,"n":10.0} +{"_time":"1996-12-19T17:39:58.000000000","_key":"B","time":"1996-12-19T17:39:58.000000000","key":"B","m":24.0,"n":3.0} +{"_time":"1996-12-19T17:39:59.000000000","_key":"A","time":"1996-12-19T17:39:59.000000000","key":"A","m":17.0,"n":6.0} +{"_time":"1996-12-19T17:40:00.000000000","_key":"A","time":"1996-12-19T17:40:00.000000000","key":"A","m":null,"n":9.0} +{"_time":"1996-12-19T17:40:01.000000000","_key":"A","time":"1996-12-19T17:40:01.000000000","key":"A","m":12.0,"n":null} +{"_time":"1996-12-19T17:40:02.000000000","_key":"A","time":"1996-12-19T17:40:02.000000000","key":"A","m":null,"n":null} diff --git a/python/pytests/golden/else_test/test_else_.jsonl b/python/pytests/golden/else_test/test_else_.jsonl new file mode 100644 index 000000000..3a4a2439b --- /dev/null +++ b/python/pytests/golden/else_test/test_else_.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"condition_m":false,"if_else_m":-1,"n":10.0,"condition_n":true,"if_n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"condition_m":true,"if_else_m":24,"n":3.0,"condition_n":false,"if_n":-1} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"condition_m":true,"if_else_m":17,"n":6.0,"condition_n":true,"if_n":6} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"condition_m":null,"if_else_m":-1,"n":9.0,"condition_n":true,"if_n":9} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"condition_m":false,"if_else_m":-1,"n":null,"condition_n":null,"if_n":-1} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"condition_m":null,"if_else_m":-1,"n":null,"condition_n":null,"if_n":-1} diff --git a/python/pytests/golden/else_test/test_else_debug.jsonl b/python/pytests/golden/else_test/test_else_debug.jsonl new file mode 100644 index 000000000..616705799 --- /dev/null +++ b/python/pytests/golden/else_test/test_else_debug.jsonl @@ -0,0 +1,2 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","default_record":null,"overide":{"test":"override_val"},"override_else_default":{"test":"override_val"}} 
+{"_time":"1996-12-19T16:39:58.000000000","_key":"A","default_record":{"test":"default"},"overide":null,"override_else_default":{"test":"default"}} diff --git a/python/pytests/golden/filter_test/test_filter.jsonl b/python/pytests/golden/filter_test/test_filter.jsonl new file mode 100644 index 000000000..e7bf7974f --- /dev/null +++ b/python/pytests/golden/filter_test/test_filter.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"condition_m":false,"filter_m":null,"n":10.0,"condition_n":true,"filter_n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"condition_m":true,"filter_m":24.0,"n":3.0,"condition_n":false,"filter_n":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"condition_m":true,"filter_m":17.0,"n":6.0,"condition_n":true,"filter_n":6.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"condition_m":null,"filter_m":null,"n":9.0,"condition_n":true,"filter_n":9.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"condition_m":false,"filter_m":null,"n":null,"condition_n":null,"filter_n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"condition_m":null,"filter_m":null,"n":null,"condition_n":null,"filter_n":null} diff --git a/python/pytests/golden/flatten_test/test_flatten.jsonl b/python/pytests/golden/flatten_test/test_flatten.jsonl new file mode 100644 index 000000000..dcb02106d --- /dev/null +++ b/python/pytests/golden/flatten_test/test_flatten.jsonl @@ -0,0 +1,5 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","result":[5]} +{"_time":"1996-12-19T17:39:57.000000000","_key":"A","result":[]} +{"_time":"1996-12-19T18:39:57.000000000","_key":"A","result":[]} +{"_time":"1996-12-19T19:39:57.000000000","_key":"A","result":[6,7]} +{"_time":"1996-12-19T19:39:57.000000000","_key":"A","result":[7,8,9,10]} diff --git a/python/pytests/golden/if_test/test_if_.jsonl b/python/pytests/golden/if_test/test_if_.jsonl new file mode 100644 index 000000000..714798eeb --- /dev/null +++ b/python/pytests/golden/if_test/test_if_.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"condition_m":false,"if_m":null,"n":10.0,"condition_n":true,"if_n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"condition_m":true,"if_m":24.0,"n":3.0,"condition_n":false,"if_n":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"condition_m":true,"if_m":17.0,"n":6.0,"condition_n":true,"if_n":6.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"condition_m":null,"if_m":null,"n":9.0,"condition_n":true,"if_n":9.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"condition_m":false,"if_m":null,"n":null,"condition_n":null,"if_n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"condition_m":null,"if_m":null,"n":null,"condition_n":null,"if_n":null} diff --git a/python/pytests/golden/jsonl_string_source_test/test_read_jsonl.jsonl b/python/pytests/golden/jsonl_string_source_test/test_read_jsonl.jsonl new file mode 100644 index 000000000..3a8348c73 --- /dev/null +++ b/python/pytests/golden/jsonl_string_source_test/test_read_jsonl.jsonl @@ -0,0 +1 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","user":"A","m":5,"n":10} diff --git a/python/pytests/golden/jsonl_string_source_test/test_read_jsonl_1.jsonl b/python/pytests/golden/jsonl_string_source_test/test_read_jsonl_1.jsonl new file mode 100644 index 000000000..8ba76da02 --- /dev/null +++ 
b/python/pytests/golden/jsonl_string_source_test/test_read_jsonl_1.jsonl @@ -0,0 +1,3 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","user":"A","m":5,"n":10.0} +{"_time":"1996-12-19T16:40:57.000000000","_key":"A","time":"1996-12-19T16:40:57.000000000","user":"A","m":8,"n":10.0} +{"_time":"1996-12-19T16:41:57.000000000","_key":"B","time":"1996-12-19T16:41:57.000000000","user":"B","m":5,"n":null} diff --git a/python/pytests/golden/jsonl_string_source_test/test_read_jsonl_lists.jsonl b/python/pytests/golden/jsonl_string_source_test/test_read_jsonl_lists.jsonl new file mode 100644 index 000000000..5e6829ea7 --- /dev/null +++ b/python/pytests/golden/jsonl_string_source_test/test_read_jsonl_lists.jsonl @@ -0,0 +1 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","user":"A","m":[5,10],"n":10} diff --git a/python/pytests/golden/jsonl_string_source_test/test_read_jsonl_lists_1.jsonl b/python/pytests/golden/jsonl_string_source_test/test_read_jsonl_lists_1.jsonl new file mode 100644 index 000000000..ab5cf64b4 --- /dev/null +++ b/python/pytests/golden/jsonl_string_source_test/test_read_jsonl_lists_1.jsonl @@ -0,0 +1,3 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","user":"A","m":[5,10],"n":10} +{"_time":"1996-12-19T16:40:57.000000000","_key":"A","time":"1996-12-19T16:40:57.000000000","user":"A","m":[],"n":10} +{"_time":"1996-12-19T16:41:57.000000000","_key":"A","time":"1996-12-19T16:41:57.000000000","user":"A","m":null,"n":10} diff --git a/python/pytests/golden/lag_test/test_lag.jsonl b/python/pytests/golden/lag_test/test_lag.jsonl new file mode 100644 index 000000000..f5b7d8e1a --- /dev/null +++ b/python/pytests/golden/lag_test/test_lag.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"lag_1_m":null,"lag_2_m":null,"n":10.0,"lag_1_n":null,"lag_2_n":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"lag_1_m":null,"lag_2_m":null,"n":3.0,"lag_1_n":null,"lag_2_n":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"lag_1_m":5.0,"lag_2_m":null,"n":6.0,"lag_1_n":10.0,"lag_2_n":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"lag_1_m":5.0,"lag_2_m":null,"n":9.0,"lag_1_n":6.0,"lag_2_n":10.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"lag_1_m":17.0,"lag_2_m":5.0,"n":null,"lag_1_n":6.0,"lag_2_n":10.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"lag_1_m":17.0,"lag_2_m":5.0,"n":null,"lag_1_n":6.0,"lag_2_n":10.0} diff --git a/python/pytests/golden/lag_test/test_lag_list.jsonl b/python/pytests/golden/lag_test/test_lag_list.jsonl new file mode 100644 index 000000000..600b35f4d --- /dev/null +++ b/python/pytests/golden/lag_test/test_lag_list.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"list_m":[5],"lag_list_m":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"list_m":[24],"lag_list_m":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"list_m":[5,17],"lag_list_m":[5]} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"list_m":[5,17],"lag_list_m":[5,17]} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"list_m":[5,17,12],"lag_list_m":[5,17]} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"list_m":[5,17,12],"lag_list_m":[5,17,12]} diff --git a/python/pytests/golden/lag_test/test_lag_struct.jsonl 
b/python/pytests/golden/lag_test/test_lag_struct.jsonl new file mode 100644 index 000000000..0b8c24f5f --- /dev/null +++ b/python/pytests/golden/lag_test/test_lag_struct.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":null,"key":null,"m":null,"n":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":null,"key":null,"m":null,"n":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"n":10.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"n":6.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","time":"1996-12-19T16:40:00.000000000","key":"A","m":null,"n":9.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"n":null} diff --git a/python/pytests/golden/length_test/test_length.jsonl b/python/pytests/golden/length_test/test_length.jsonl new file mode 100644 index 000000000..6ec7150f5 --- /dev/null +++ b/python/pytests/golden/length_test/test_length.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","str":"apple","len_key":5.0,"list":["apple"],"len_list":1} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","str":"dog","len_key":3.0,"list":["dog"],"len_list":1} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","str":"carrot","len_key":6.0,"list":["apple","carrot"],"len_list":2} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","str":null,"len_key":null,"list":["apple","carrot"],"len_list":2} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","str":"eggplant","len_key":8.0,"list":["apple","carrot","eggplant"],"len_list":3} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","str":"fig","len_key":3.0,"list":["apple","carrot","eggplant","fig"],"len_list":4} diff --git a/python/pytests/golden/lookup_test/test_lookup.jsonl b/python/pytests/golden/lookup_test/test_lookup.jsonl new file mode 100644 index 000000000..a09b280e9 --- /dev/null +++ b/python/pytests/golden/lookup_test/test_lookup.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","state":"WA","lookup":null,"lookup_last":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","state":"NC","lookup":24.0,"lookup_last":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","state":"WA","lookup":17.0,"lookup_last":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","state":"NC","lookup":null,"lookup_last":24.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","state":"SC","lookup":null,"lookup_last":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","state":"WA","lookup":null,"lookup_last":12.0} diff --git a/python/pytests/golden/math_test/test_math_int64.jsonl b/python/pytests/golden/math_test/test_math_int64.jsonl new file mode 100644 index 000000000..f648bf8a0 --- /dev/null +++ b/python/pytests/golden/math_test/test_math_int64.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"n":10.0,"add":15.0,"sub":-5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"n":3.0,"add":27.0,"sub":21.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"n":6.0,"add":23.0,"sub":11.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"n":9.0,"add":null,"sub":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"n":null,"add":null,"sub":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"n":null,"add":null,"sub":null} diff --git 
a/python/pytests/golden/max_test/test_max_since_true.jsonl b/python/pytests/golden/max_test/test_max_since_true.jsonl new file mode 100644 index 000000000..19ff90403 --- /dev/null +++ b/python/pytests/golden/max_test/test_max_since_true.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"m_max":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"m_max":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"m_max":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"m_max":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"m_max":12.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"m_max":null} diff --git a/python/pytests/golden/max_test/test_max_unwindowed.jsonl b/python/pytests/golden/max_test/test_max_unwindowed.jsonl new file mode 100644 index 000000000..7aa7ede84 --- /dev/null +++ b/python/pytests/golden/max_test/test_max_unwindowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"max_m":5,"n":10.0,"max_n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"max_m":24,"n":3.0,"max_n":3} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"max_m":17,"n":6.0,"max_n":10} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"max_m":17,"n":9.0,"max_n":10} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"max_m":17,"n":null,"max_n":10} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"max_m":17,"n":null,"max_n":10} diff --git a/python/pytests/golden/max_test/test_max_windowed.jsonl b/python/pytests/golden/max_test/test_max_windowed.jsonl new file mode 100644 index 000000000..4b40d71ac --- /dev/null +++ b/python/pytests/golden/max_test/test_max_windowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"max_m":5,"n":10.0,"max_n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"max_m":24,"n":3.0,"max_n":3} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"max_m":17,"n":6.0,"max_n":10} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"max_m":17,"n":9.0,"max_n":10} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"max_m":17,"n":null,"max_n":10} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"max_m":17,"n":null,"max_n":9} diff --git a/python/pytests/golden/mean_test/test_mean_since_true.jsonl b/python/pytests/golden/mean_test/test_mean_since_true.jsonl new file mode 100644 index 000000000..fe52336cf --- /dev/null +++ b/python/pytests/golden/mean_test/test_mean_since_true.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"m_mean":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"m_mean":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"m_mean":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"m_mean":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"m_mean":12.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"m_mean":null} diff --git a/python/pytests/golden/mean_test/test_mean_unwindowed.jsonl b/python/pytests/golden/mean_test/test_mean_unwindowed.jsonl new file mode 100644 index 000000000..f56108923 --- /dev/null +++ b/python/pytests/golden/mean_test/test_mean_unwindowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"mean_m":5.0,"n":10.0,"mean_n":10.0} 
+{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"mean_m":24.0,"n":3.0,"mean_n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"mean_m":11.0,"n":6.0,"mean_n":8.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"mean_m":11.0,"n":9.0,"mean_n":8.3333333333} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"mean_m":11.3333333333,"n":null,"mean_n":8.3333333333} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"mean_m":11.3333333333,"n":null,"mean_n":8.3333333333} diff --git a/python/pytests/golden/mean_test/test_mean_windowed.jsonl b/python/pytests/golden/mean_test/test_mean_windowed.jsonl new file mode 100644 index 000000000..65278123e --- /dev/null +++ b/python/pytests/golden/mean_test/test_mean_windowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"mean_m":5.0,"n":10.0,"mean_n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"mean_m":24.0,"n":3.0,"mean_n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"mean_m":11.0,"n":6.0,"mean_n":8.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"mean_m":11.0,"n":9.0,"mean_n":8.3333333333} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"mean_m":11.3333333333,"n":null,"mean_n":8.3333333333} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"mean_m":11.3333333333,"n":null,"mean_n":9.0} diff --git a/python/pytests/golden/min_test/test_min_since_true.jsonl b/python/pytests/golden/min_test/test_min_since_true.jsonl new file mode 100644 index 000000000..9c7e44b10 --- /dev/null +++ b/python/pytests/golden/min_test/test_min_since_true.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"m_min":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"m_min":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"m_min":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"m_min":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"m_min":12.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"m_min":null} diff --git a/python/pytests/golden/min_test/test_min_unwindowed.jsonl b/python/pytests/golden/min_test/test_min_unwindowed.jsonl new file mode 100644 index 000000000..24244d712 --- /dev/null +++ b/python/pytests/golden/min_test/test_min_unwindowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"min_m":5,"n":10.0,"min_n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"min_m":24,"n":3.0,"min_n":3} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"min_m":5,"n":6.0,"min_n":6} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"min_m":5,"n":9.0,"min_n":6} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"min_m":5,"n":null,"min_n":6} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"min_m":5,"n":null,"min_n":6} diff --git a/python/pytests/golden/min_test/test_min_windowed.jsonl b/python/pytests/golden/min_test/test_min_windowed.jsonl new file mode 100644 index 000000000..3c5b0599c --- /dev/null +++ b/python/pytests/golden/min_test/test_min_windowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"min_m":5,"n":10.0,"min_n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"min_m":24,"n":3.0,"min_n":3} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"min_m":5,"n":6.0,"min_n":6} 
+{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"min_m":5,"n":9.0,"min_n":6} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"min_m":5,"n":null,"min_n":6} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"min_m":5,"n":null,"min_n":9} diff --git a/python/pytests/golden/null_test/test_is_null.jsonl b/python/pytests/golden/null_test/test_is_null.jsonl new file mode 100644 index 000000000..f308e20d1 --- /dev/null +++ b/python/pytests/golden/null_test/test_is_null.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"is_null_m":false,"is_not_null_m":true,"n":10.0,"is_null_n":false,"is_not_null_n":true} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"is_null_m":false,"is_not_null_m":true,"n":3.0,"is_null_n":false,"is_not_null_n":true} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"is_null_m":false,"is_not_null_m":true,"n":6.0,"is_null_n":false,"is_not_null_n":true} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"is_null_m":true,"is_not_null_m":false,"n":9.0,"is_null_n":false,"is_not_null_n":true} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"is_null_m":false,"is_not_null_m":true,"n":null,"is_null_n":true,"is_not_null_n":false} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"is_null_m":true,"is_not_null_m":false,"n":null,"is_null_n":true,"is_not_null_n":false} diff --git a/python/pytests/golden/null_test/test_null_if.jsonl b/python/pytests/golden/null_test/test_null_if.jsonl new file mode 100644 index 000000000..5e93fc7a5 --- /dev/null +++ b/python/pytests/golden/null_test/test_null_if.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"condition_m":false,"null_if_m":5.0,"n":10.0,"condition_n":true,"null_if_n":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"condition_m":true,"null_if_m":null,"n":3.0,"condition_n":false,"null_if_n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"condition_m":true,"null_if_m":null,"n":6.0,"condition_n":true,"null_if_n":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"condition_m":null,"null_if_m":null,"n":9.0,"condition_n":true,"null_if_n":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"condition_m":false,"null_if_m":12.0,"n":null,"condition_n":null,"null_if_n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"condition_m":null,"null_if_m":null,"n":null,"condition_n":null,"null_if_n":null} diff --git a/python/pytests/golden/pandas_source_test/test_add_dataframe.jsonl b/python/pytests/golden/pandas_source_test/test_add_dataframe.jsonl new file mode 100644 index 000000000..837b03a7f --- /dev/null +++ b/python/pytests/golden/pandas_source_test/test_add_dataframe.jsonl @@ -0,0 +1 @@ +{"_time":"1970-01-01T00:16:40.000000000","_key":"a","time":1000000000000,"key":"a"} diff --git a/python/pytests/golden/pandas_source_test/test_add_dataframe_1.jsonl b/python/pytests/golden/pandas_source_test/test_add_dataframe_1.jsonl new file mode 100644 index 000000000..ab7b423ee --- /dev/null +++ b/python/pytests/golden/pandas_source_test/test_add_dataframe_1.jsonl @@ -0,0 +1,2 @@ +{"_time":"1970-01-01T00:16:40.000000000","_key":"a","time":1000000000000,"key":"a"} +{"_time":"1970-01-01T00:16:40.000000000","_key":"a","time":1000000000000,"key":"a"} diff --git a/python/pytests/golden/pandas_source_test/test_float_milliseconds.jsonl b/python/pytests/golden/pandas_source_test/test_float_milliseconds.jsonl new file 
mode 100644 index 000000000..6b4e5d7fa --- /dev/null +++ b/python/pytests/golden/pandas_source_test/test_float_milliseconds.jsonl @@ -0,0 +1 @@ +{"_time":"2022-12-19T19:17:52.026118912","_key":"tom","time":1671477472026.1188964844,"user":"tom"} diff --git a/python/pytests/golden/pandas_source_test/test_float_nanoseconds.jsonl b/python/pytests/golden/pandas_source_test/test_float_nanoseconds.jsonl new file mode 100644 index 000000000..b5427b0c5 --- /dev/null +++ b/python/pytests/golden/pandas_source_test/test_float_nanoseconds.jsonl @@ -0,0 +1 @@ +{"_time":"2022-12-19T19:17:52.026119000","_key":"tom","time":1671477472026119000,"user":"tom"} diff --git a/python/pytests/golden/pandas_source_test/test_float_seconds.jsonl b/python/pytests/golden/pandas_source_test/test_float_seconds.jsonl new file mode 100644 index 000000000..61a33fb5e --- /dev/null +++ b/python/pytests/golden/pandas_source_test/test_float_seconds.jsonl @@ -0,0 +1 @@ +{"_time":"2022-12-19T19:17:52.026118912","_key":"tom","time":1671477472.0261189938,"user":"tom"} diff --git a/python/pytests/golden/parquet_source_test/test_read_parquet.jsonl b/python/pytests/golden/parquet_source_test/test_read_parquet.jsonl new file mode 100644 index 000000000..076d1f1aa --- /dev/null +++ b/python/pytests/golden/parquet_source_test/test_read_parquet.jsonl @@ -0,0 +1,10 @@ +{"_time":"2020-01-01T00:00:00.000000000","_key":"karen","id":"cb_001","purchase_time":"2020-01-01T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":9,"subsort_id":0} +{"_time":"2020-01-01T00:00:00.000000000","_key":"patrick","id":"kk_001","purchase_time":"2020-01-01T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":3,"subsort_id":1} +{"_time":"2020-01-02T00:00:00.000000000","_key":"karen","id":"cb_002","purchase_time":"2020-01-02T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":2,"subsort_id":2} +{"_time":"2020-01-02T00:00:00.000000000","_key":"patrick","id":"kk_002","purchase_time":"2020-01-02T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":5,"subsort_id":3} +{"_time":"2020-01-03T00:00:00.000000000","_key":"karen","id":"cb_003","purchase_time":"2020-01-03T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":4,"subsort_id":4} +{"_time":"2020-01-03T00:00:00.000000000","_key":"patrick","id":"kk_003","purchase_time":"2020-01-03T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":12,"subsort_id":5} +{"_time":"2020-01-04T00:00:00.000000000","_key":"patrick","id":"cb_004","purchase_time":"2020-01-04T00:00:00.000000000","customer_id":"patrick","vendor_id":"chum_bucket","amount":5000,"subsort_id":6} +{"_time":"2020-01-04T00:00:00.000000000","_key":"karen","id":"cb_005","purchase_time":"2020-01-04T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":3,"subsort_id":7} +{"_time":"2020-01-05T00:00:00.000000000","_key":"karen","id":"cb_006","purchase_time":"2020-01-05T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":5,"subsort_id":8} +{"_time":"2020-01-05T00:00:00.000000000","_key":"patrick","id":"kk_004","purchase_time":"2020-01-05T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":9,"subsort_id":9} diff --git a/python/pytests/golden/parquet_source_test/test_read_parquet_1.jsonl b/python/pytests/golden/parquet_source_test/test_read_parquet_1.jsonl new file mode 100644 index 000000000..f37105ffb --- /dev/null +++ 
b/python/pytests/golden/parquet_source_test/test_read_parquet_1.jsonl @@ -0,0 +1,15 @@ +{"_time":"2020-01-01T00:00:00.000000000","_key":"karen","id":"cb_001","purchase_time":"2020-01-01T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":9,"subsort_id":0} +{"_time":"2020-01-01T00:00:00.000000000","_key":"patrick","id":"kk_001","purchase_time":"2020-01-01T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":3,"subsort_id":1} +{"_time":"2020-01-02T00:00:00.000000000","_key":"karen","id":"cb_002","purchase_time":"2020-01-02T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":2,"subsort_id":2} +{"_time":"2020-01-02T00:00:00.000000000","_key":"patrick","id":"kk_002","purchase_time":"2020-01-02T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":5,"subsort_id":3} +{"_time":"2020-01-03T00:00:00.000000000","_key":"karen","id":"cb_003","purchase_time":"2020-01-03T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":4,"subsort_id":4} +{"_time":"2020-01-03T00:00:00.000000000","_key":"patrick","id":"kk_003","purchase_time":"2020-01-03T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":12,"subsort_id":5} +{"_time":"2020-01-04T00:00:00.000000000","_key":"patrick","id":"cb_004","purchase_time":"2020-01-04T00:00:00.000000000","customer_id":"patrick","vendor_id":"chum_bucket","amount":5000,"subsort_id":6} +{"_time":"2020-01-04T00:00:00.000000000","_key":"karen","id":"cb_005","purchase_time":"2020-01-04T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":3,"subsort_id":7} +{"_time":"2020-01-05T00:00:00.000000000","_key":"karen","id":"cb_006","purchase_time":"2020-01-05T00:00:00.000000000","customer_id":"karen","vendor_id":"chum_bucket","amount":5,"subsort_id":8} +{"_time":"2020-01-05T00:00:00.000000000","_key":"patrick","id":"kk_004","purchase_time":"2020-01-05T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":9,"subsort_id":9} +{"_time":"2020-01-06T00:00:00.000000000","_key":"patrick","id":"kk_005","purchase_time":"2020-01-06T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":2,"subsort_id":0} +{"_time":"2020-01-06T00:00:00.000000000","_key":"spongebob","id":"wh_001","purchase_time":"2020-01-06T00:00:00.000000000","customer_id":"spongebob","vendor_id":"weenie_hut","amount":7,"subsort_id":1} +{"_time":"2020-01-07T00:00:00.000000000","_key":"spongebob","id":"cb_007","purchase_time":"2020-01-07T00:00:00.000000000","customer_id":"spongebob","vendor_id":"chum_bucket","amount":34,"subsort_id":2} +{"_time":"2020-01-08T00:00:00.000000000","_key":"karen","id":"wh_002","purchase_time":"2020-01-08T00:00:00.000000000","customer_id":"karen","vendor_id":"weenie_hut","amount":8,"subsort_id":3} +{"_time":"2020-01-08T00:00:00.000000000","_key":"patrick","id":"kk_006","purchase_time":"2020-01-08T00:00:00.000000000","customer_id":"patrick","vendor_id":"krusty_krab","amount":9,"subsort_id":4} diff --git a/python/pytests/golden/pylist_source_test/test_read_pylist.jsonl b/python/pytests/golden/pylist_source_test/test_read_pylist.jsonl new file mode 100644 index 000000000..962a8f4b7 --- /dev/null +++ b/python/pytests/golden/pylist_source_test/test_read_pylist.jsonl @@ -0,0 +1 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57","user":"A","m":5,"n":10} diff --git a/python/pytests/golden/pylist_source_test/test_read_pylist_1.jsonl 
b/python/pytests/golden/pylist_source_test/test_read_pylist_1.jsonl new file mode 100644 index 000000000..acee4bfbb --- /dev/null +++ b/python/pytests/golden/pylist_source_test/test_read_pylist_1.jsonl @@ -0,0 +1,3 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57","user":"A","m":5,"n":10.0} +{"_time":"1996-12-19T16:40:57.000000000","_key":"A","time":"1996-12-19T16:40:57","user":"A","m":8,"n":10.0} +{"_time":"1996-12-19T16:41:57.000000000","_key":"B","time":"1996-12-19T16:41:57","user":"B","m":5,"n":null} diff --git a/python/pytests/golden/pylist_source_test/test_read_pylist_2.jsonl b/python/pytests/golden/pylist_source_test/test_read_pylist_2.jsonl new file mode 100644 index 000000000..5f6b5fc59 --- /dev/null +++ b/python/pytests/golden/pylist_source_test/test_read_pylist_2.jsonl @@ -0,0 +1,4 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57","user":"A","m":5,"n":10.0} +{"_time":"1996-12-19T16:40:57.000000000","_key":"A","time":"1996-12-19T16:40:57","user":"A","m":8,"n":10.0} +{"_time":"1996-12-19T16:41:57.000000000","_key":"B","time":"1996-12-19T16:41:57","user":"B","m":5,"n":null} +{"_time":"1996-12-19T16:42:57.000000000","_key":"A","time":"1996-12-19T16:42:57","user":"A","m":8,"n":10.0} diff --git a/python/pytests/golden/pylist_source_test/test_read_pylist_ignore_column.jsonl b/python/pytests/golden/pylist_source_test/test_read_pylist_ignore_column.jsonl new file mode 100644 index 000000000..ed023369a --- /dev/null +++ b/python/pytests/golden/pylist_source_test/test_read_pylist_ignore_column.jsonl @@ -0,0 +1 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57","user":"A","n":10} diff --git a/python/pytests/golden/pylist_source_test/test_read_pylist_ignore_column_1.jsonl b/python/pytests/golden/pylist_source_test/test_read_pylist_ignore_column_1.jsonl new file mode 100644 index 000000000..85ca3e904 --- /dev/null +++ b/python/pytests/golden/pylist_source_test/test_read_pylist_ignore_column_1.jsonl @@ -0,0 +1,3 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57","user":"A","n":10.0} +{"_time":"1996-12-19T16:40:57.000000000","_key":"A","time":"1996-12-19T16:40:57","user":"A","n":10.0} +{"_time":"1996-12-19T16:41:57.000000000","_key":"A","time":"1996-12-19T16:41:57","user":"A","n":null} diff --git a/python/pytests/golden/pylist_source_test/test_read_pylist_lists.jsonl b/python/pytests/golden/pylist_source_test/test_read_pylist_lists.jsonl new file mode 100644 index 000000000..2d6638097 --- /dev/null +++ b/python/pytests/golden/pylist_source_test/test_read_pylist_lists.jsonl @@ -0,0 +1 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57","user":"A","m":[5,10],"n":10} diff --git a/python/pytests/golden/pylist_source_test/test_read_pylist_lists_1.jsonl b/python/pytests/golden/pylist_source_test/test_read_pylist_lists_1.jsonl new file mode 100644 index 000000000..37965f998 --- /dev/null +++ b/python/pytests/golden/pylist_source_test/test_read_pylist_lists_1.jsonl @@ -0,0 +1,3 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57","user":"A","m":[5,10],"n":10} +{"_time":"1996-12-19T16:40:57.000000000","_key":"A","time":"1996-12-19T16:40:57","user":"A","m":[],"n":10} +{"_time":"1996-12-19T16:41:57.000000000","_key":"A","time":"1996-12-19T16:41:57","user":"A","m":null,"n":10} diff --git a/python/pytests/golden/record_test/test_extend_input.jsonl b/python/pytests/golden/record_test/test_extend_input.jsonl 
new file mode 100644 index 000000000..7b4bc1fc8 --- /dev/null +++ b/python/pytests/golden/record_test/test_extend_input.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","add":15.0,"time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","add":27.0,"time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","add":23.0,"time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"n":6.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","add":null,"time":"1996-12-19T16:40:00.000000000","key":"A","m":null,"n":9.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","add":null,"time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","add":null,"time":"1996-12-19T16:40:02.000000000","key":"A","m":null,"n":null} diff --git a/python/pytests/golden/record_test/test_extend_record.jsonl b/python/pytests/golden/record_test/test_extend_record.jsonl new file mode 100644 index 000000000..7b4bc1fc8 --- /dev/null +++ b/python/pytests/golden/record_test/test_extend_record.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","add":15.0,"time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","add":27.0,"time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","add":23.0,"time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"n":6.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","add":null,"time":"1996-12-19T16:40:00.000000000","key":"A","m":null,"n":9.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","add":null,"time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","add":null,"time":"1996-12-19T16:40:02.000000000","key":"A","m":null,"n":null} diff --git a/python/pytests/golden/record_test/test_record.jsonl b/python/pytests/golden/record_test/test_record.jsonl new file mode 100644 index 000000000..3e648b535 --- /dev/null +++ b/python/pytests/golden/record_test/test_record.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"n":6.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"n":9.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"n":null} diff --git a/python/pytests/golden/record_test/test_remove_record.jsonl b/python/pytests/golden/record_test/test_remove_record.jsonl new file mode 100644 index 000000000..3f53f7ad0 --- /dev/null +++ b/python/pytests/golden/record_test/test_remove_record.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","time":"1996-12-19T16:40:00.000000000","key":"A","m":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0} 
+{"_time":"1996-12-19T16:40:02.000000000","_key":"A","time":"1996-12-19T16:40:02.000000000","key":"A","m":null} diff --git a/python/pytests/golden/record_test/test_select_record.jsonl b/python/pytests/golden/record_test/test_select_record.jsonl new file mode 100644 index 000000000..c0a68bb0c --- /dev/null +++ b/python/pytests/golden/record_test/test_select_record.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","n":6.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","n":9.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","n":null} diff --git a/python/pytests/golden/result_test/test_iter_pandas.jsonl b/python/pytests/golden/result_test/test_iter_pandas.jsonl new file mode 100644 index 000000000..bdf1fc0e2 --- /dev/null +++ b/python/pytests/golden/result_test/test_iter_pandas.jsonl @@ -0,0 +1,2 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","key":"A","m":5,"n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","key":"B","m":24,"n":3} diff --git a/python/pytests/golden/result_test/test_iter_pandas_1.jsonl b/python/pytests/golden/result_test/test_iter_pandas_1.jsonl new file mode 100644 index 000000000..23e6c121a --- /dev/null +++ b/python/pytests/golden/result_test/test_iter_pandas_1.jsonl @@ -0,0 +1,2 @@ +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"n":6} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","time":"1996-12-19T16:40:00.000000000","key":"A","m":null,"n":9} diff --git a/python/pytests/golden/result_test/test_iter_pandas_async.jsonl b/python/pytests/golden/result_test/test_iter_pandas_async.jsonl new file mode 100644 index 000000000..bdf1fc0e2 --- /dev/null +++ b/python/pytests/golden/result_test/test_iter_pandas_async.jsonl @@ -0,0 +1,2 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","key":"A","m":5,"n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","key":"B","m":24,"n":3} diff --git a/python/pytests/golden/result_test/test_iter_pandas_async_1.jsonl b/python/pytests/golden/result_test/test_iter_pandas_async_1.jsonl new file mode 100644 index 000000000..23e6c121a --- /dev/null +++ b/python/pytests/golden/result_test/test_iter_pandas_async_1.jsonl @@ -0,0 +1,2 @@ +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"n":6} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","time":"1996-12-19T16:40:00.000000000","key":"A","m":null,"n":9} diff --git a/python/pytests/golden/result_test/test_iter_pandas_async_materialize.jsonl b/python/pytests/golden/result_test/test_iter_pandas_async_materialize.jsonl new file mode 100644 index 000000000..c1d8ce147 --- /dev/null +++ b/python/pytests/golden/result_test/test_iter_pandas_async_materialize.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"n":10.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"n":3.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"n":6.0} 
+{"_time":"1996-12-19T16:40:00.000000000","_key":"A","time":"1996-12-19T16:40:00.000000000","key":"A","m":null,"n":9.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","time":"1996-12-19T16:40:02.000000000","key":"A","m":null,"n":null} diff --git a/python/pytests/golden/result_test/test_iter_pandas_async_materialize_1.jsonl b/python/pytests/golden/result_test/test_iter_pandas_async_materialize_1.jsonl new file mode 100644 index 000000000..922338e6f --- /dev/null +++ b/python/pytests/golden/result_test/test_iter_pandas_async_materialize_1.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-20T16:39:57.000000000","_key":"A","time":"1996-12-20T16:39:57.000000000","key":"A","m":5.0,"n":10.0} +{"_time":"1996-12-20T16:39:58.000000000","_key":"B","time":"1996-12-20T16:39:58.000000000","key":"B","m":24.0,"n":3.0} +{"_time":"1996-12-20T16:39:59.000000000","_key":"A","time":"1996-12-20T16:39:59.000000000","key":"A","m":17.0,"n":6.0} +{"_time":"1996-12-20T16:40:00.000000000","_key":"C","time":"1996-12-20T16:40:00.000000000","key":"C","m":null,"n":9.0} +{"_time":"1996-12-20T16:40:01.000000000","_key":"A","time":"1996-12-20T16:40:01.000000000","key":"A","m":12.0,"n":null} +{"_time":"1996-12-20T16:40:02.000000000","_key":"A","time":"1996-12-20T16:40:02.000000000","key":"A","m":null,"n":null} diff --git a/python/pytests/golden/seconds_since_previous_test/test_seconds_since_previous.jsonl b/python/pytests/golden/seconds_since_previous_test/test_seconds_since_previous.jsonl new file mode 100644 index 000000000..0466a634d --- /dev/null +++ b/python/pytests/golden/seconds_since_previous_test/test_seconds_since_previous.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","seconds_since":null,"seconds_since_1":null,"seconds_since_2":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","seconds_since":null,"seconds_since_1":null,"seconds_since_2":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","seconds_since":2.0,"seconds_since_1":2.0,"seconds_since_2":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","seconds_since":1.0,"seconds_since_1":1.0,"seconds_since_2":3.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","seconds_since":1.0,"seconds_since_1":1.0,"seconds_since_2":2.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","seconds_since":1.0,"seconds_since_1":1.0,"seconds_since_2":2.0} diff --git a/python/pytests/golden/seconds_since_test/test_seconds_since.jsonl b/python/pytests/golden/seconds_since_test/test_seconds_since.jsonl new file mode 100644 index 000000000..3ddf0ec7d --- /dev/null +++ b/python/pytests/golden/seconds_since_test/test_seconds_since.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","t1":"1996-12-19T16:39:57.000000000","t2":"1996-12-19T16:42:57.000000000","seconds_since_t1":180.0,"seconds_since_t2":-180.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","t1":"1996-12-19T16:39:58.000000000","t2":"1996-12-19T16:39:59.000000000","seconds_since_t1":1.0,"seconds_since_t2":-1.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","t1":"1996-12-19T16:39:59.000000000","t2":null,"seconds_since_t1":null,"seconds_since_t2":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","t1":"1996-12-19T16:40:00.000000000","t2":"1996-12-19T16:41:00.000000000","seconds_since_t1":60.0,"seconds_since_t2":-60.0} 
+{"_time":"1996-12-19T16:40:01.000000000","_key":"A","t1":"1996-12-19T16:40:01.000000000","t2":"1996-12-19T16:42:01.000000000","seconds_since_t1":120.0,"seconds_since_t2":-120.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","t1":"1996-12-19T16:40:02.000000000","t2":"1996-12-19T16:43:02.000000000","seconds_since_t1":180.0,"seconds_since_t2":-180.0} diff --git a/python/pytests/golden/seconds_since_test/test_seconds_since_datetime.jsonl b/python/pytests/golden/seconds_since_test/test_seconds_since_datetime.jsonl new file mode 100644 index 000000000..0547c7f13 --- /dev/null +++ b/python/pytests/golden/seconds_since_test/test_seconds_since_datetime.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","t1":"1996-12-19T16:39:57.000000000","seconds_since_literal":7} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","t1":"1996-12-19T16:39:58.000000000","seconds_since_literal":8} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","t1":"1996-12-19T16:39:59.000000000","seconds_since_literal":9} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","t1":"1996-12-19T16:40:00.000000000","seconds_since_literal":10} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","t1":"1996-12-19T16:40:01.000000000","seconds_since_literal":11} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","t1":"1996-12-19T16:40:02.000000000","seconds_since_literal":12} diff --git a/python/pytests/golden/shift_by_test/test_shift_by_timedelta.jsonl b/python/pytests/golden/shift_by_test/test_shift_by_timedelta.jsonl new file mode 100644 index 000000000..ece7c1824 --- /dev/null +++ b/python/pytests/golden/shift_by_test/test_shift_by_timedelta.jsonl @@ -0,0 +1,12 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","shift_by_1_s":null,"shift_by_1_m":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"A","time":null,"shift_by_1_s":"1996-12-19T16:39:57.000000000","shift_by_1_m":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","shift_by_1_s":null,"shift_by_1_m":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"B","time":null,"shift_by_1_s":"1996-12-19T16:39:58.000000000","shift_by_1_m":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","shift_by_1_s":null,"shift_by_1_m":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","time":null,"shift_by_1_s":"1996-12-19T16:39:59.000000000","shift_by_1_m":null} +{"_time":"1996-12-19T16:40:57.000000000","_key":"A","time":null,"shift_by_1_s":null,"shift_by_1_m":"1996-12-19T16:39:57.000000000"} +{"_time":"1996-12-19T16:40:58.000000000","_key":"B","time":null,"shift_by_1_s":null,"shift_by_1_m":"1996-12-19T16:39:58.000000000"} +{"_time":"1996-12-19T16:40:59.000000000","_key":"A","time":null,"shift_by_1_s":null,"shift_by_1_m":"1996-12-19T16:39:59.000000000"} +{"_time":"1997-01-18T16:40:00.000000000","_key":"A","time":"1997-01-18T16:40:00.000000000","shift_by_1_s":null,"shift_by_1_m":null} +{"_time":"1997-01-18T16:40:01.000000000","_key":"A","time":null,"shift_by_1_s":"1997-01-18T16:40:00.000000000","shift_by_1_m":null} +{"_time":"1997-01-18T16:41:00.000000000","_key":"A","time":null,"shift_by_1_s":null,"shift_by_1_m":"1997-01-18T16:40:00.000000000"} diff --git a/python/pytests/golden/shift_by_test/test_shift_collect.jsonl b/python/pytests/golden/shift_by_test/test_shift_collect.jsonl new file mode 100644 index 000000000..2990412da --- /dev/null +++ b/python/pytests/golden/shift_by_test/test_shift_collect.jsonl @@ -0,0 
+1,12 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","shift_by_1_s_time":null,"shift_by_1_s_ms":null,"shift_by_1_m_time":null,"shift_by_1_m_ms":null,"time":"1996-12-19T16:39:57.000000000","ms":[5],"m":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"A","shift_by_1_s_time":"1996-12-19T16:39:57.000000000","shift_by_1_s_ms":[5],"shift_by_1_m_time":null,"shift_by_1_m_ms":null,"time":null,"ms":[5],"m":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","shift_by_1_s_time":null,"shift_by_1_s_ms":null,"shift_by_1_m_time":null,"shift_by_1_m_ms":null,"time":"1996-12-19T16:39:58.000000000","ms":[24],"m":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"B","shift_by_1_s_time":"1996-12-19T16:39:58.000000000","shift_by_1_s_ms":[24],"shift_by_1_m_time":null,"shift_by_1_m_ms":null,"time":null,"ms":[24],"m":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","shift_by_1_s_time":null,"shift_by_1_s_ms":null,"shift_by_1_m_time":null,"shift_by_1_m_ms":null,"time":"1996-12-19T16:39:59.000000000","ms":[5,17],"m":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","shift_by_1_s_time":"1996-12-19T16:39:59.000000000","shift_by_1_s_ms":[5,17],"shift_by_1_m_time":null,"shift_by_1_m_ms":null,"time":null,"ms":[5,17],"m":null} +{"_time":"1996-12-19T16:40:57.000000000","_key":"A","shift_by_1_s_time":null,"shift_by_1_s_ms":null,"shift_by_1_m_time":"1996-12-19T16:39:57.000000000","shift_by_1_m_ms":[5],"time":null,"ms":[5,17],"m":null} +{"_time":"1996-12-19T16:40:58.000000000","_key":"B","shift_by_1_s_time":null,"shift_by_1_s_ms":null,"shift_by_1_m_time":"1996-12-19T16:39:58.000000000","shift_by_1_m_ms":[24],"time":null,"ms":[24],"m":null} +{"_time":"1996-12-19T16:40:59.000000000","_key":"A","shift_by_1_s_time":null,"shift_by_1_s_ms":null,"shift_by_1_m_time":"1996-12-19T16:39:59.000000000","shift_by_1_m_ms":[5,17],"time":null,"ms":[5,17],"m":null} +{"_time":"1997-01-18T16:40:00.000000000","_key":"A","shift_by_1_s_time":null,"shift_by_1_s_ms":null,"shift_by_1_m_time":null,"shift_by_1_m_ms":null,"time":"1997-01-18T16:40:00.000000000","ms":[5,17],"m":null} +{"_time":"1997-01-18T16:40:01.000000000","_key":"A","shift_by_1_s_time":"1997-01-18T16:40:00.000000000","shift_by_1_s_ms":[5,17],"shift_by_1_m_time":null,"shift_by_1_m_ms":null,"time":null,"ms":[5,17],"m":null} +{"_time":"1997-01-18T16:41:00.000000000","_key":"A","shift_by_1_s_time":null,"shift_by_1_s_ms":null,"shift_by_1_m_time":"1997-01-18T16:40:00.000000000","shift_by_1_m_ms":[5,17],"time":null,"ms":[5,17],"m":null} diff --git a/python/pytests/golden/shift_to_test/test_shift_to_column.jsonl b/python/pytests/golden/shift_to_test/test_shift_to_column.jsonl new file mode 100644 index 000000000..5ecc6a7e3 --- /dev/null +++ b/python/pytests/golden/shift_to_test/test_shift_to_column.jsonl @@ -0,0 +1,8 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","time_plus_seconds":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","time_plus_seconds":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","time_plus_seconds":null} +{"_time":"1996-12-19T16:40:07.000000000","_key":"A","time":null,"time_plus_seconds":"1996-12-19T16:39:57.000000000"} +{"_time":"1996-12-19T16:40:08.000000000","_key":"B","time":null,"time_plus_seconds":"1996-12-19T16:39:58.000000000"} +{"_time":"1996-12-19T16:40:09.000000000","_key":"A","time":null,"time_plus_seconds":"1996-12-19T16:39:59.000000000"} 
+{"_time":"1997-01-18T16:40:00.000000000","_key":"A","time":"1997-01-18T16:40:00.000000000","time_plus_seconds":null} +{"_time":"1997-01-18T16:40:10.000000000","_key":"A","time":null,"time_plus_seconds":"1997-01-18T16:40:00.000000000"} diff --git a/python/pytests/golden/shift_until_test/test_shift_until_predicate.jsonl b/python/pytests/golden/shift_until_test/test_shift_until_predicate.jsonl new file mode 100644 index 000000000..84edde0e4 --- /dev/null +++ b/python/pytests/golden/shift_until_test/test_shift_until_predicate.jsonl @@ -0,0 +1,11 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"sum_m":5,"predicate":false,"shift_until":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"sum_m":24,"predicate":false,"shift_until":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"sum_m":22,"predicate":false,"shift_until":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"sum_m":22,"predicate":false,"shift_until":5.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"sum_m":22,"predicate":false,"shift_until":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"sum_m":22,"predicate":false,"shift_until":10.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":10.0,"sum_m":32,"predicate":true,"shift_until":10.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":null,"sum_m":32,"predicate":true,"shift_until":12.0} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"sum_m":44,"predicate":true,"shift_until":12.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"sum_m":44,"predicate":true,"shift_until":12.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"sum_m":44,"predicate":true,"shift_until":12.0} diff --git a/python/pytests/golden/stddev_test/test_stddev_since_true.jsonl b/python/pytests/golden/stddev_test/test_stddev_since_true.jsonl new file mode 100644 index 000000000..c1b35c762 --- /dev/null +++ b/python/pytests/golden/stddev_test/test_stddev_since_true.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"m_stddev":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"m_stddev":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"m_stddev":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"m_stddev":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"m_stddev":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"m_stddev":null} diff --git a/python/pytests/golden/stddev_test/test_stddev_unwindowed.jsonl b/python/pytests/golden/stddev_test/test_stddev_unwindowed.jsonl new file mode 100644 index 000000000..23fca12bc --- /dev/null +++ b/python/pytests/golden/stddev_test/test_stddev_unwindowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"stddev_m":null,"n":10.0,"stddev_n":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"stddev_m":null,"n":3.0,"stddev_n":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"stddev_m":6.0,"n":6.0,"stddev_n":2.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"stddev_m":6.0,"n":9.0,"stddev_n":1.6996731712} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"stddev_m":4.9216076867,"n":null,"stddev_n":1.6996731712} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"stddev_m":4.9216076867,"n":null,"stddev_n":1.6996731712} diff --git a/python/pytests/golden/stddev_test/test_stddev_windowed.jsonl 
b/python/pytests/golden/stddev_test/test_stddev_windowed.jsonl new file mode 100644 index 000000000..ccd03a681 --- /dev/null +++ b/python/pytests/golden/stddev_test/test_stddev_windowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"stddev_m":null,"n":10.0,"stddev_n":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"stddev_m":null,"n":3.0,"stddev_n":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"stddev_m":6.0,"n":6.0,"stddev_n":2.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"stddev_m":6.0,"n":9.0,"stddev_n":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"stddev_m":4.9216076867,"n":null,"stddev_n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"stddev_m":4.9216076867,"n":null,"stddev_n":null} diff --git a/python/pytests/golden/sum_test/test_sum_since_true.jsonl b/python/pytests/golden/sum_test/test_sum_since_true.jsonl new file mode 100644 index 000000000..bd043c593 --- /dev/null +++ b/python/pytests/golden/sum_test/test_sum_since_true.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"m_sum":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"m_sum":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"m_sum":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"m_sum":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"m_sum":12.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"m_sum":null} diff --git a/python/pytests/golden/sum_test/test_sum_unwindowed.jsonl b/python/pytests/golden/sum_test/test_sum_unwindowed.jsonl new file mode 100644 index 000000000..abfb3ffd2 --- /dev/null +++ b/python/pytests/golden/sum_test/test_sum_unwindowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"sum_m":5,"n":10.0,"sum_n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"sum_m":24,"n":3.0,"sum_n":3} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"sum_m":22,"n":6.0,"sum_n":16} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"sum_m":22,"n":9.0,"sum_n":25} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"sum_m":34,"n":null,"sum_n":25} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"sum_m":34,"n":null,"sum_n":25} diff --git a/python/pytests/golden/sum_test/test_sum_windowed.jsonl b/python/pytests/golden/sum_test/test_sum_windowed.jsonl new file mode 100644 index 000000000..5bb6a616a --- /dev/null +++ b/python/pytests/golden/sum_test/test_sum_windowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"sum_m":5,"n":10.0,"sum_n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"sum_m":24,"n":3.0,"sum_n":3} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"sum_m":22,"n":6.0,"sum_n":16} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"sum_m":22,"n":9.0,"sum_n":25} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"sum_m":34,"n":null,"sum_n":25} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"sum_m":34,"n":null,"sum_n":9} diff --git a/python/pytests/golden/time_of_test/test_time_of.jsonl b/python/pytests/golden/time_of_test/test_time_of.jsonl new file mode 100644 index 000000000..ba326c2e2 --- /dev/null +++ b/python/pytests/golden/time_of_test/test_time_of.jsonl @@ -0,0 +1,6 @@ 
+{"_time":"1996-12-19T16:39:57.000000000","_subsort":0,"_key_hash":12960666915911099378,"_key":"A","m":5.0,"time_of_m":"1996-12-19T16:39:57.000000000","n":10.0,"time_of_n":"1996-12-19T16:39:57.000000000"} +{"_time":"1996-12-19T16:39:58.000000000","_subsort":1,"_key_hash":2867199309159137213,"_key":"B","m":24.0,"time_of_m":"1996-12-19T16:39:58.000000000","n":3.0,"time_of_n":"1996-12-19T16:39:58.000000000"} +{"_time":"1996-12-19T16:39:59.000000000","_subsort":2,"_key_hash":12960666915911099378,"_key":"A","m":17.0,"time_of_m":"1996-12-19T16:39:59.000000000","n":6.0,"time_of_n":"1996-12-19T16:39:59.000000000"} +{"_time":"1996-12-19T16:40:00.000000000","_subsort":3,"_key_hash":12960666915911099378,"_key":"A","m":null,"time_of_m":"1996-12-19T16:40:00.000000000","n":9.0,"time_of_n":"1996-12-19T16:40:00.000000000"} +{"_time":"1996-12-19T16:40:01.000000000","_subsort":4,"_key_hash":12960666915911099378,"_key":"A","m":12.0,"time_of_m":"1996-12-19T16:40:01.000000000","n":null,"time_of_n":"1996-12-19T16:40:01.000000000"} +{"_time":"1996-12-19T16:40:02.000000000","_subsort":5,"_key_hash":12960666915911099378,"_key":"A","m":null,"time_of_m":"1996-12-19T16:40:02.000000000","n":null,"time_of_n":"1996-12-19T16:40:02.000000000"} diff --git a/python/pytests/golden/time_test/test_time_add_days.jsonl b/python/pytests/golden/time_test/test_time_add_days.jsonl new file mode 100644 index 000000000..d211cab18 --- /dev/null +++ b/python/pytests/golden/time_test/test_time_add_days.jsonl @@ -0,0 +1,4 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","time_plus_day":"1996-12-20T16:39:57.000000000"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","time_plus_day":"1996-12-20T16:39:58.000000000"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","time_plus_day":"1996-12-20T16:39:59.000000000"} +{"_time":"1997-01-18T16:40:00.000000000","_key":"A","time":"1997-01-18T16:40:00.000000000","time_plus_day":"1997-01-19T16:40:00.000000000"} diff --git a/python/pytests/golden/time_test/test_time_add_days_and_minutes.jsonl b/python/pytests/golden/time_test/test_time_add_days_and_minutes.jsonl new file mode 100644 index 000000000..aafcaefdc --- /dev/null +++ b/python/pytests/golden/time_test/test_time_add_days_and_minutes.jsonl @@ -0,0 +1,4 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","time_plus_day":"1996-12-22T16:40:57.000000000"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","time_plus_day":"1996-12-22T16:40:58.000000000"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","time_plus_day":"1996-12-22T16:40:59.000000000"} +{"_time":"1997-01-18T16:40:00.000000000","_key":"A","time":"1997-01-18T16:40:00.000000000","time_plus_day":"1997-01-21T16:41:00.000000000"} diff --git a/python/pytests/golden/time_test/test_time_add_hours.jsonl b/python/pytests/golden/time_test/test_time_add_hours.jsonl new file mode 100644 index 000000000..e21efb2c2 --- /dev/null +++ b/python/pytests/golden/time_test/test_time_add_hours.jsonl @@ -0,0 +1,4 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","time_plus_hours":"1996-12-19T17:39:57.000000000"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","time_plus_hours":"1996-12-19T17:39:58.000000000"} 
+{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","time_plus_hours":"1996-12-19T17:39:59.000000000"} +{"_time":"1997-01-18T16:40:00.000000000","_key":"A","time":"1997-01-18T16:40:00.000000000","time_plus_hours":"1997-01-18T17:40:00.000000000"} diff --git a/python/pytests/golden/time_test/test_time_add_minutes.jsonl b/python/pytests/golden/time_test/test_time_add_minutes.jsonl new file mode 100644 index 000000000..80980d55f --- /dev/null +++ b/python/pytests/golden/time_test/test_time_add_minutes.jsonl @@ -0,0 +1,4 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","time_plus_minutes":"1996-12-19T16:40:57.000000000"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","time_plus_minutes":"1996-12-19T16:40:58.000000000"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","time_plus_minutes":"1996-12-19T16:40:59.000000000"} +{"_time":"1997-01-18T16:40:00.000000000","_key":"A","time":"1997-01-18T16:40:00.000000000","time_plus_minutes":"1997-01-18T16:41:00.000000000"} diff --git a/python/pytests/golden/time_test/test_time_add_seconds.jsonl b/python/pytests/golden/time_test/test_time_add_seconds.jsonl new file mode 100644 index 000000000..3bf7972ef --- /dev/null +++ b/python/pytests/golden/time_test/test_time_add_seconds.jsonl @@ -0,0 +1,4 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","time_plus_seconds":"1996-12-19T16:40:02.000000000"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","time_plus_seconds":"1996-12-19T16:40:03.000000000"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","time_plus_seconds":"1996-12-19T16:40:04.000000000"} +{"_time":"1997-01-18T16:40:00.000000000","_key":"A","time":"1997-01-18T16:40:00.000000000","time_plus_seconds":"1997-01-18T16:40:05.000000000"} diff --git a/python/pytests/golden/time_test/test_time_of_point.jsonl b/python/pytests/golden/time_test/test_time_of_point.jsonl new file mode 100644 index 000000000..2be62d93b --- /dev/null +++ b/python/pytests/golden/time_test/test_time_of_point.jsonl @@ -0,0 +1,4 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"time_of_m":"1996-12-19T16:39:57.000000000","n":10,"time_of_n":"1996-12-19T16:39:57.000000000"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"time_of_m":"1996-12-19T16:39:58.000000000","n":3,"time_of_n":"1996-12-19T16:39:58.000000000"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"time_of_m":"1996-12-19T16:39:59.000000000","n":6,"time_of_n":"1996-12-19T16:39:59.000000000"} +{"_time":"1997-01-18T16:40:00.000000000","_key":"A","m":null,"time_of_m":"1997-01-18T16:40:00.000000000","n":9,"time_of_n":"1997-01-18T16:40:00.000000000"} diff --git a/python/pytests/golden/timestream_test/test_timestream_cast.jsonl b/python/pytests/golden/timestream_test/test_timestream_cast.jsonl new file mode 100644 index 000000000..a3f21dd3a --- /dev/null +++ b/python/pytests/golden/timestream_test/test_timestream_cast.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","result":"1996-12-19T16:39:57.000000000"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","result":"1996-12-19T16:39:58.000000000"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","result":"1996-12-19T16:39:59.000000000"} 
+{"_time":"1996-12-19T16:40:00.000000000","_key":"A","result":"1996-12-19T16:40:00.000000000"} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","result":"1996-12-19T16:40:01.000000000"} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","result":"1996-12-19T16:40:02.000000000"} diff --git a/python/pytests/golden/timestream_test/test_timestream_preview.jsonl b/python/pytests/golden/timestream_test/test_timestream_preview.jsonl new file mode 100644 index 000000000..0561aadfc --- /dev/null +++ b/python/pytests/golden/timestream_test/test_timestream_preview.jsonl @@ -0,0 +1,4 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"n":10} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"n":3} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"n":6} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","time":"1996-12-19T16:40:00.000000000","key":"A","m":null,"n":9} diff --git a/python/pytests/golden/timestream_test/test_timestream_run_non_record.jsonl b/python/pytests/golden/timestream_test/test_timestream_run_non_record.jsonl new file mode 100644 index 000000000..758b1a236 --- /dev/null +++ b/python/pytests/golden/timestream_test/test_timestream_run_non_record.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","result":5.0} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","result":24.0} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","result":17.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","result":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","result":12.0} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","result":null} diff --git a/python/pytests/golden/union_test/test_union.jsonl b/python/pytests/golden/union_test/test_union.jsonl new file mode 100644 index 000000000..b792a97fc --- /dev/null +++ b/python/pytests/golden/union_test/test_union.jsonl @@ -0,0 +1,5 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","result":[5.0]} +{"_time":"1996-12-19T17:39:57.000000000","_key":"A","result":[5.0,6.0]} +{"_time":"1996-12-19T18:39:57.000000000","_key":"A","result":[null]} +{"_time":"1996-12-19T19:39:57.000000000","_key":"A","result":[6.0,7.0,8.0]} +{"_time":"1996-12-19T19:39:57.000000000","_key":"A","result":[6.0,7.0,8.0,9.0,10.0]} diff --git a/python/pytests/golden/variance_test/test_variance_since_true.jsonl b/python/pytests/golden/variance_test/test_variance_since_true.jsonl new file mode 100644 index 000000000..3e86d1b70 --- /dev/null +++ b/python/pytests/golden/variance_test/test_variance_since_true.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"m_variance":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"m_variance":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"m_variance":null} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"m_variance":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"m_variance":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"m_variance":null} diff --git a/python/pytests/golden/variance_test/test_variance_unwindowed.jsonl b/python/pytests/golden/variance_test/test_variance_unwindowed.jsonl new file mode 100644 index 000000000..922b72d7e --- /dev/null +++ b/python/pytests/golden/variance_test/test_variance_unwindowed.jsonl @@ -0,0 +1,6 @@ 
+{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"variance_m":null,"n":10.0,"variance_n":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"variance_m":null,"n":3.0,"variance_n":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"variance_m":36.0,"n":6.0,"variance_n":4.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"variance_m":36.0,"n":9.0,"variance_n":2.8888888889} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"variance_m":24.2222222222,"n":null,"variance_n":2.8888888889} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"variance_m":24.2222222222,"n":null,"variance_n":2.8888888889} diff --git a/python/pytests/golden/variance_test/test_variance_windowed.jsonl b/python/pytests/golden/variance_test/test_variance_windowed.jsonl new file mode 100644 index 000000000..fc6aa725d --- /dev/null +++ b/python/pytests/golden/variance_test/test_variance_windowed.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"A","m":5.0,"variance_m":null,"n":10.0,"variance_n":null} +{"_time":"1996-12-19T16:39:58.000000000","_key":"B","m":24.0,"variance_m":null,"n":3.0,"variance_n":null} +{"_time":"1996-12-19T16:39:59.000000000","_key":"A","m":17.0,"variance_m":36.0,"n":6.0,"variance_n":4.0} +{"_time":"1996-12-19T16:40:00.000000000","_key":"A","m":null,"variance_m":36.0,"n":9.0,"variance_n":null} +{"_time":"1996-12-19T16:40:01.000000000","_key":"A","m":12.0,"variance_m":24.2222222222,"n":null,"variance_n":null} +{"_time":"1996-12-19T16:40:02.000000000","_key":"A","m":null,"variance_m":24.2222222222,"n":null,"variance_n":null} diff --git a/python/pytests/golden/with_key_test/test_with_key_column.jsonl b/python/pytests/golden/with_key_test/test_with_key_column.jsonl new file mode 100644 index 000000000..0ed1387e0 --- /dev/null +++ b/python/pytests/golden/with_key_test/test_with_key_column.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"C","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"new_key":"C"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"D","time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"new_key":"D"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"C","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:00.000000000","_key":"C","time":"1996-12-19T16:40:00.000000000","key":"A","m":9.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:01.000000000","_key":"C","time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:02.000000000","_key":"C","time":"1996-12-19T16:40:02.000000000","key":"A","m":null,"new_key":"C"} diff --git a/python/pytests/golden/with_key_test/test_with_key_grouping.jsonl b/python/pytests/golden/with_key_test/test_with_key_grouping.jsonl new file mode 100644 index 000000000..0ed1387e0 --- /dev/null +++ b/python/pytests/golden/with_key_test/test_with_key_grouping.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"C","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"new_key":"C"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"D","time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"new_key":"D"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"C","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:00.000000000","_key":"C","time":"1996-12-19T16:40:00.000000000","key":"A","m":9.0,"new_key":"C"} 
+{"_time":"1996-12-19T16:40:01.000000000","_key":"C","time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:02.000000000","_key":"C","time":"1996-12-19T16:40:02.000000000","key":"A","m":null,"new_key":"C"} diff --git a/python/pytests/golden/with_key_test/test_with_key_last.jsonl b/python/pytests/golden/with_key_test/test_with_key_last.jsonl new file mode 100644 index 000000000..0ed1387e0 --- /dev/null +++ b/python/pytests/golden/with_key_test/test_with_key_last.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"C","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"new_key":"C"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"D","time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"new_key":"D"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"C","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:00.000000000","_key":"C","time":"1996-12-19T16:40:00.000000000","key":"A","m":9.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:01.000000000","_key":"C","time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:02.000000000","_key":"C","time":"1996-12-19T16:40:02.000000000","key":"A","m":null,"new_key":"C"} diff --git a/python/pytests/golden/with_key_test/test_with_key_literal.jsonl b/python/pytests/golden/with_key_test/test_with_key_literal.jsonl new file mode 100644 index 000000000..92556d24b --- /dev/null +++ b/python/pytests/golden/with_key_test/test_with_key_literal.jsonl @@ -0,0 +1,6 @@ +{"_time":"1996-12-19T16:39:57.000000000","_key":"literal_key","time":"1996-12-19T16:39:57.000000000","key":"A","m":5.0,"new_key":"C"} +{"_time":"1996-12-19T16:39:58.000000000","_key":"literal_key","time":"1996-12-19T16:39:58.000000000","key":"B","m":24.0,"new_key":"D"} +{"_time":"1996-12-19T16:39:59.000000000","_key":"literal_key","time":"1996-12-19T16:39:59.000000000","key":"A","m":17.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:00.000000000","_key":"literal_key","time":"1996-12-19T16:40:00.000000000","key":"A","m":9.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:01.000000000","_key":"literal_key","time":"1996-12-19T16:40:01.000000000","key":"A","m":12.0,"new_key":"C"} +{"_time":"1996-12-19T16:40:02.000000000","_key":"literal_key","time":"1996-12-19T16:40:02.000000000","key":"A","m":null,"new_key":"C"} diff --git a/python/pytests/if_test.py b/python/pytests/if_test.py new file mode 100644 index 000000000..d30e90eab --- /dev/null +++ b/python/pytests/if_test.py @@ -0,0 +1,37 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_if_(source, golden) -> None: + m = source.col("m") + n = source.col("n") + condition_m = m > 15 + condition_n = n > 5 + golden.jsonl( + kd.record( + { + "m": m, + "condition_m": condition_m, + "if_m": m.if_(condition_m), + "n": n, + "condition_n": condition_n, + "if_n": n.if_(condition_n), + } + ) + ) diff --git a/python/pytests/jsonl_string_source_test.py b/python/pytests/jsonl_string_source_test.py new file mode 100644 index 000000000..b7191a48a --- /dev/null +++ b/python/pytests/jsonl_string_source_test.py @@ -0,0 +1,36 
@@ +import kaskada as kd + + +def test_read_jsonl(golden) -> None: + source = kd.sources.JsonlString( + '{"time": "1996-12-19T16:39:57", "user": "A", "m": 5, "n": 10}', + time_column_name="time", + key_column_name="user", + ) + golden.jsonl(source) + + source.add_string( + """ + {"time": "1996-12-19T16:40:57", "user": "A", "m": 8, "n": 10} + {"time": "1996-12-19T16:41:57", "user": "B", "m": 5} + """ + ) + + golden.jsonl(source) + + +def test_read_jsonl_lists(golden) -> None: + source = kd.sources.JsonlString( + '{"time": "1996-12-19T16:39:57", "user": "A", "m": [5, 10], "n": 10}', + time_column_name="time", + key_column_name="user", + ) + golden.jsonl(source) + + source.add_string( + """ + {"time": "1996-12-19T16:40:57", "user": "A", "m": [], "n": 10} + {"time": "1996-12-19T16:41:57", "user": "A", "n": 10} + """ + ) + golden.jsonl(source) diff --git a/python/pytests/lag_test.py b/python/pytests/lag_test.py new file mode 100644 index 000000000..5fe59c88d --- /dev/null +++ b/python/pytests/lag_test.py @@ -0,0 +1,52 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_lag(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl( + kd.record( + { + "m": m, + "lag_1_m": m.lag(1), + "lag_2_m": m.lag(2), + "n": n, + "lag_1_n": n.lag(1), + "lag_2_n": n.lag(2), + } + ) + ) + + +def test_lag_struct(source, golden) -> None: + golden.jsonl(source.lag(1)) + + +def test_lag_list(source, golden) -> None: + m = source.col("m") + golden.jsonl( + kd.record( + { + "m": m, + "list_m": m.collect(max=None), + "lag_list_m": m.collect(max=None).lag(1), + } + ) + ) diff --git a/python/pytests/length_test.py b/python/pytests/length_test.py new file mode 100644 index 000000000..eeac4d92c --- /dev/null +++ b/python/pytests/length_test.py @@ -0,0 +1,33 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n,str", + '1996-12-19T16:39:57,A,5,10,"apple"', + '1996-12-19T16:39:58,B,24,3,"dog"', + '1996-12-19T16:39:59,A,17,6,"carrot"', + "1996-12-19T16:40:00,A,,9,", + '1996-12-19T16:40:01,A,12,,"eggplant"', + '1996-12-19T16:40:02,A,,,"fig"', + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_length(source, golden) -> None: + my_str = source.col("str") + list = my_str.collect(max=None) + golden.jsonl( + kd.record( + { + "str": my_str, + "len_key": my_str.length(), + "list": list, + "len_list": list.length(), + } + ) + ) diff --git a/python/pytests/lookup_test.py b/python/pytests/lookup_test.py new file mode 100644 index 000000000..c94b62002 --- /dev/null +++ b/python/pytests/lookup_test.py @@ -0,0 +1,49 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def key_source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,state", + "1996-12-19T16:39:57,A,WA", + "1996-12-19T16:39:58,B,NC", + "1996-12-19T16:39:59,A,WA", + "1996-12-19T16:40:00,A,NC", + "1996-12-19T16:40:01,A,SC", + "1996-12-19T16:40:02,A,WA", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + 
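+# The fixture below is keyed by state (NC, WA), so `test_lookup` can join its values onto the key source via `lookup(state)`.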
+@pytest.fixture(scope="module") +def foreign_source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,NC,10,5", + "1996-12-19T16:39:58,NC,24,3", + "1996-12-19T16:39:59,WA,17,6", + "1996-12-19T16:40:00,NC,,9", + "1996-12-19T16:40:01,WA,12,", + "1996-12-19T16:40:02,WA,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_lookup(key_source, foreign_source, golden) -> None: + state = key_source.col("state") + foreign_value = foreign_source.col("m") + last_foreign_value = foreign_source.col("m").last() + golden.jsonl( + kd.record( + { + "state": state, + "lookup": foreign_value.lookup(state), + "lookup_last": last_foreign_value.lookup(state), + } + ) + ) diff --git a/python/pytests/math_test.py b/python/pytests/math_test.py new file mode 100644 index 000000000..d44c8dee6 --- /dev/null +++ b/python/pytests/math_test.py @@ -0,0 +1,33 @@ +import kaskada as kd +import pytest + + +@pytest.fixture +def source_int64() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_math_int64(golden, source_int64) -> None: + m = source_int64.col("m") + n = source_int64.col("n") + golden.jsonl( + kd.record( + { + "m": m, + "n": n, + "add": m + n, + "sub": m - n, + } + ) + ) diff --git a/python/pytests/null_test.py b/python/pytests/null_test.py new file mode 100644 index 000000000..150601c12 --- /dev/null +++ b/python/pytests/null_test.py @@ -0,0 +1,54 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_null_if(source, golden) -> None: + m = source.col("m") + n = source.col("n") + condition_m = m > 15 + condition_n = n > 5 + golden.jsonl( + kd.record( + { + "m": m, + "condition_m": condition_m, + "null_if_m": m.null_if(condition_m), + "n": n, + "condition_n": condition_n, + "null_if_n": n.null_if(condition_n), + } + ) + ) + + +def test_is_null(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl( + kd.record( + { + "m": m, + "is_null_m": m.is_null(), + "is_not_null_m": m.is_not_null(), + "n": n, + "is_null_n": n.is_null(), + "is_not_null_n": n.is_not_null(), + } + ) + ) diff --git a/python/pytests/pandas_source_test.py b/python/pytests/pandas_source_test.py new file mode 100644 index 000000000..81dd263f0 --- /dev/null +++ b/python/pytests/pandas_source_test.py @@ -0,0 +1,51 @@ +import kaskada as kd +import pandas as pd + + +def test_float_seconds(golden) -> None: + data = {"time": [1671477472.026119], "user": ["tom"]} + df = pd.DataFrame(data) + table = kd.sources.Pandas( + df, time_column_name="time", key_column_name="user", time_unit="s" + ) + + golden.jsonl(table) + + +def test_float_milliseconds(golden) -> None: + data = {"time": [1671477472026.119], "user": ["tom"]} + df = pd.DataFrame(data) + table = kd.sources.Pandas( + df, time_column_name="time", key_column_name="user", 
time_unit="ms" + ) + + golden.jsonl(table) + + +def test_float_nanoseconds(golden) -> None: + data = {"time": [1671477472026119000], "user": ["tom"]} + df = pd.DataFrame(data) + table = kd.sources.Pandas(df, time_column_name="time", key_column_name="user") + + golden.jsonl(table) + + +def test_add_dataframe(golden) -> None: + df1 = pd.DataFrame( + { + "time": [1000000000000], + "key": ["a"], + } + ) + + table = kd.sources.Pandas(df1, time_column_name="time", key_column_name="key") + golden.jsonl(table) + + df2 = pd.DataFrame( + { + "time": [1000000000000], + "key": ["a"], + } + ) + table.add_data(df2) + golden.jsonl(table) diff --git a/python/pytests/parquet_source_test.py b/python/pytests/parquet_source_test.py new file mode 100644 index 000000000..6df717202 --- /dev/null +++ b/python/pytests/parquet_source_test.py @@ -0,0 +1,13 @@ +import kaskada as kd + + +def test_read_parquet(golden) -> None: + source = kd.sources.Parquet( + "../testdata/purchases/purchases_part1.parquet", + time_column_name="purchase_time", + key_column_name="customer_id", + ) + golden.jsonl(source) + + source.add_file("../testdata/purchases/purchases_part2.parquet") + golden.jsonl(source) diff --git a/python/pytests/pylist_source_test.py b/python/pytests/pylist_source_test.py new file mode 100644 index 000000000..09e6759ce --- /dev/null +++ b/python/pytests/pylist_source_test.py @@ -0,0 +1,56 @@ +import kaskada as kd + + +def test_read_pylist(golden) -> None: + source = kd.sources.PyList( + [{"time": "1996-12-19T16:39:57", "user": "A", "m": 5, "n": 10}], + time_column_name="time", + key_column_name="user", + ) + golden.jsonl(source) + + source.add_rows( + [ + {"time": "1996-12-19T16:40:57", "user": "A", "m": 8, "n": 10}, + {"time": "1996-12-19T16:41:57", "user": "B", "m": 5}, + ] + ) + + golden.jsonl(source) + source.add_rows({"time": "1996-12-19T16:42:57", "user": "A", "m": 8, "n": 10}) + golden.jsonl(source) + + +def test_read_pylist_lists(golden) -> None: + source = kd.sources.PyList( + [{"time": "1996-12-19T16:39:57", "user": "A", "m": [5, 10], "n": 10}], + time_column_name="time", + key_column_name="user", + ) + golden.jsonl(source) + + source.add_rows( + [ + {"time": "1996-12-19T16:40:57", "user": "A", "m": [], "n": 10}, + {"time": "1996-12-19T16:41:57", "user": "A", "n": 10}, + ] + ) + golden.jsonl(source) + + +def test_read_pylist_ignore_column(golden) -> None: + # Schema is determined from first row, and doesn't contain an "m" column. 
+ source = kd.sources.PyList( + [{"time": "1996-12-19T16:39:57", "user": "A", "n": 10}], + time_column_name="time", + key_column_name="user", + ) + golden.jsonl(source) + + source.add_rows( + [ + {"time": "1996-12-19T16:40:57", "user": "A", "m": 83, "n": 10}, + {"time": "1996-12-19T16:41:57", "user": "A", "m": 12}, + ] + ) + golden.jsonl(source) diff --git a/python/pytests/record_test.py b/python/pytests/record_test.py new file mode 100644 index 000000000..c55698517 --- /dev/null +++ b/python/pytests/record_test.py @@ -0,0 +1,50 @@ +import kaskada as kd +import pytest + + +@pytest.fixture +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_record(source, golden) -> None: + m = source.col("m") + n = source.col("n") + + golden.jsonl( + kd.record( + { + "m": m, + "n": n, + } + ) + ) + + +def test_extend_record(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl(source.extend({"add": m + n})) + + +def test_extend_input(source, golden) -> None: + golden.jsonl(source.extend(lambda input: {"add": input.col("m") + input.col("n")})) + + +def test_select_record(source, golden) -> None: + golden.jsonl(source.select("n")) + + +def test_remove_record(source, golden) -> None: + golden.jsonl(source.remove("n")) diff --git a/python/pytests/result_test.py b/python/pytests/result_test.py new file mode 100644 index 000000000..dee608d36 --- /dev/null +++ b/python/pytests/result_test.py @@ -0,0 +1,80 @@ +import kaskada as kd +import pytest + + +@pytest.fixture +def source_int64() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_iter_pandas(golden, source_int64) -> None: + batches = source_int64.run(row_limit=4, max_batch_size=2).iter_pandas() + + # 4 rows, max 2 per batch = 2 batches + golden.jsonl(next(batches)) + golden.jsonl(next(batches)) + with pytest.raises(StopIteration): + next(batches) + + +def test_iter_rows(golden, source_int64) -> None: + results = source_int64.run(row_limit=2).iter_rows() + assert next(results)["m"] == 5 + assert next(results)["m"] == 24 + with pytest.raises(StopIteration): + next(results) + + +@pytest.mark.asyncio +async def test_iter_pandas_async(golden, source_int64) -> None: + batches = source_int64.run(row_limit=4, max_batch_size=2).iter_pandas_async() + + # 4 rows, max 2 per batch = 2 batches. + + # We could test using `await anext(batches)`, but that wasn't introduced + # until Python 3.10. Since everything else works in 3.8 and 3.9, we just + # call `__anext__` directly. 
+ golden.jsonl(await batches.__anext__()) + golden.jsonl(await batches.__anext__()) + with pytest.raises(StopAsyncIteration): + await batches.__anext__() + + +@pytest.mark.asyncio +async def test_iter_pandas_async_materialize(golden, source_int64) -> None: + data2 = "\n".join( + [ + "time,key,m,n", + "1996-12-20T16:39:57,A,5,10", + "1996-12-20T16:39:58,B,24,3", + "1996-12-20T16:39:59,A,17,6", + "1996-12-20T16:40:00,C,,9", + "1996-12-20T16:40:01,A,12,", + "1996-12-20T16:40:02,A,,", + ] + ) + + execution = source_int64.run(materialize=True) + batches = execution.iter_pandas_async() + + # Await the first batch. + golden.jsonl(await batches.__anext__()) + + # Add data and await the second batch. + source_int64.add_string(data2) + golden.jsonl(await batches.__anext__()) + + execution.stop() + with pytest.raises(StopAsyncIteration): + print(await batches.__anext__()) diff --git a/python/pytests/seconds_since_previous_test.py b/python/pytests/seconds_since_previous_test.py new file mode 100644 index 000000000..51d804db7 --- /dev/null +++ b/python/pytests/seconds_since_previous_test.py @@ -0,0 +1,32 @@ +import kaskada as kd +import pyarrow as pa +import pytest + + +@pytest.fixture +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n,t", + "1996-12-19T16:39:57,A,5,10,1996-12-19T16:42:57", + "1996-12-19T16:39:58,B,24,3,1996-12-19T16:39:59", + "1996-12-19T16:39:59,A,17,6,", + "1996-12-19T16:40:00,A,,9,1996-12-19T16:41:00", + "1996-12-19T16:40:01,A,12,,1996-12-19T16:42:01", + "1996-12-19T16:40:02,A,,,1996-12-19T16:43:02", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_seconds_since_previous(golden, source) -> None: + t = source.col("time") + golden.jsonl( + kd.record( + { + "seconds_since": t.seconds_since_previous().cast(pa.int64()), + "seconds_since_1": t.seconds_since_previous(1).cast(pa.int64()), + "seconds_since_2": t.seconds_since_previous(2).cast(pa.int64()), + } + ) + ) diff --git a/python/pytests/seconds_since_test.py b/python/pytests/seconds_since_test.py new file mode 100644 index 000000000..5f6dc0b89 --- /dev/null +++ b/python/pytests/seconds_since_test.py @@ -0,0 +1,46 @@ +import datetime as datetime + +import kaskada as kd +import pyarrow as pa +import pytest + + +@pytest.fixture +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n,t", + "1996-12-19T16:39:57,A,5,10,1996-12-19T16:42:57", + "1996-12-19T16:39:58,B,24,3,1996-12-19T16:39:59", + "1996-12-19T16:39:59,A,17,6,", + "1996-12-19T16:40:00,A,,9,1996-12-19T16:41:00", + "1996-12-19T16:40:01,A,12,,1996-12-19T16:42:01", + "1996-12-19T16:40:02,A,,,1996-12-19T16:43:02", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_seconds_since(golden, source) -> None: + t1 = source.col("time") + t2 = source.col("t") + golden.jsonl( + kd.record( + { + "t1": t1, + "t2": t2, + "seconds_since_t1": t2.seconds_since(t1).cast(pa.int64()), + "seconds_since_t2": t1.seconds_since(t2).cast(pa.int64()), + } + ) + ) + + +def test_seconds_since_datetime(golden, source) -> None: + t = source.col("time") + dt = datetime.datetime(1996, 12, 19, 16, 39, 50, tzinfo=datetime.timezone.utc) + golden.jsonl( + kd.record( + {"t1": t, "seconds_since_literal": t.seconds_since(dt).cast(pa.int64())} + ) + ) diff --git a/python/pytests/shift_by_test.py b/python/pytests/shift_by_test.py new file mode 100644 index 000000000..7d838105d --- /dev/null +++ b/python/pytests/shift_by_test.py @@ -0,0 +1,55 @@ +from 
datetime import timedelta + +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1997-01-18T16:40:00,A,,9", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_shift_by_timedelta(source, golden) -> None: + time = source.col("time") + golden.jsonl( + kd.record( + { + "time": time, + "shift_by_1_s": time.shift_by(timedelta(seconds=1)), + "shift_by_1_m": time.shift_by(timedelta(minutes=1)), + } + ) + ) + + +def test_shift_collect(source, golden) -> None: + golden.jsonl( + source.record( + lambda input: { + "time": input.col("time"), + "ms": input.col("m").collect(max=10), + "m": input.col("m"), + } + ) + # Currently, the Pandas comparison method being used doesn't handle + # date-time like fields nested within a list. So we expand things out. + # + # TODO: Improve the golden testing so this isn't necessary. + .extend( + lambda base: { + "shift_by_1_s_time": base.shift_by(timedelta(seconds=1)).col("time"), + "shift_by_1_s_ms": base.shift_by(timedelta(seconds=1)).col("ms"), + "shift_by_1_m_time": base.shift_by(timedelta(minutes=1)).col("time"), + "shift_by_1_m_ms": base.shift_by(timedelta(minutes=1)).col("ms"), + } + ) + ) diff --git a/python/pytests/shift_to_test.py b/python/pytests/shift_to_test.py new file mode 100644 index 000000000..bd14d354e --- /dev/null +++ b/python/pytests/shift_to_test.py @@ -0,0 +1,43 @@ +from datetime import datetime +from datetime import timedelta + +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1997-01-18T16:40:00,A,,9", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +@pytest.mark.skip(reason="shift to literal not supported") +def test_shift_to_datetime(source, golden) -> None: + time = source.col("time") + shift_to_datetime = datetime(1996, 12, 25, 0, 0, 0) + golden.jsonl( + kd.record( + {"time": time, "shift_to_time_plus_1_day": time.shift_to(shift_to_datetime)} + ) + ) + + +def test_shift_to_column(source, golden) -> None: + time = source.col("time") + shift_by_timedelta = timedelta(seconds=10) + golden.jsonl( + kd.record( + { + "time": time, + "time_plus_seconds": time.shift_to(time.time() + shift_by_timedelta), + } + ) + ) diff --git a/python/pytests/shift_until_test.py b/python/pytests/shift_until_test.py new file mode 100644 index 000000000..92f004965 --- /dev/null +++ b/python/pytests/shift_until_test.py @@ -0,0 +1,33 @@ +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,10,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_shift_until_predicate(source, golden) -> None: + m = source.col("m") + predicate = m.sum() > 30 + golden.jsonl( + kd.record( + { + "m": m, + "sum_m": m.sum(), + "predicate": predicate, + "shift_until": m.last().shift_until(predicate), + } + ) + ) diff --git 
a/python/pytests/source_test.py b/python/pytests/source_test.py new file mode 100644 index 000000000..1d8e8a72e --- /dev/null +++ b/python/pytests/source_test.py @@ -0,0 +1,50 @@ +import kaskada as kd +import pyarrow as pa +import pytest + + +def test_table_valid() -> None: + schema = pa.schema( + [ + pa.field("time", pa.int32(), nullable=False), + pa.field("key", pa.int64(), nullable=False), + ] + ) + + kd.sources.Source(schema, time_column_name="time", key_column_name="key") + + +def test_table_invalid_names() -> None: + schema = pa.schema( + [ + pa.field("time", pa.int32(), nullable=False), + pa.field("key", pa.int64(), nullable=False), + ] + ) + + with pytest.raises(KeyError): + # Currently, this doesn't propagate Sparrow's suggestions + # of existing column names. + # TODO: Do that. + kd.sources.Source( + schema, time_column_name="non_existent_time", key_column_name="key" + ) + + with pytest.raises(KeyError): + # Currently, this doesn't propagate Sparrow's suggestions + # of existing column names. + # TODO: Do that. + kd.sources.Source( + schema, time_column_name="time", key_column_name="non_existent_key" + ) + + with pytest.raises(KeyError): + # Currently, this doesn't propagate Sparrow's suggestions + # of existing column names. + # TODO: Do that. + kd.sources.Source( + schema, + time_column_name="time", + key_column_name="key", + subsort_column_name="non_existent_subsort", + ) diff --git a/python/pytests/time_test.py b/python/pytests/time_test.py new file mode 100644 index 000000000..73063acb5 --- /dev/null +++ b/python/pytests/time_test.py @@ -0,0 +1,66 @@ +from datetime import timedelta + +import kaskada as kd +import pytest + + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1997-01-18T16:40:00,A,,9", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_time_of_point(source, golden) -> None: + m = source.col("m") + n = source.col("n") + golden.jsonl( + kd.record( + { + "m": m, + "time_of_m": m.time(), + "n": n, + "time_of_n": n.time(), + } + ) + ) + + +def test_time_add_days(source, golden) -> None: + time = source.col("time") + golden.jsonl(kd.record({"time": time, "time_plus_day": time + timedelta(days=1)})) + + +def test_time_add_hours(source, golden) -> None: + time = source.col("time") + golden.jsonl( + kd.record({"time": time, "time_plus_hours": time + timedelta(hours=1)}) + ) + + +def test_time_add_minutes(source, golden) -> None: + time = source.col("time") + golden.jsonl( + kd.record({"time": time, "time_plus_minutes": time + timedelta(minutes=1)}) + ) + + +def test_time_add_days_and_minutes(source, golden) -> None: + time = source.col("time") + golden.jsonl( + kd.record({"time": time, "time_plus_day": time + timedelta(days=3, minutes=1)}) + ) + + +def test_time_add_seconds(source, golden) -> None: + time = source.col("time") + golden.jsonl( + kd.record({"time": time, "time_plus_seconds": time + timedelta(seconds=5)}) + ) diff --git a/python/pytests/timestream_test.py b/python/pytests/timestream_test.py new file mode 100644 index 000000000..b482e4c61 --- /dev/null +++ b/python/pytests/timestream_test.py @@ -0,0 +1,165 @@ +import sys + +import kaskada as kd +import pyarrow as pa +import pytest + + +@pytest.fixture(scope="module") +def source1() -> kd.sources.Source: + schema = pa.schema( + [ + pa.field("time", pa.int32(),
nullable=False), + pa.field("key", pa.int64(), nullable=False), + pa.field("x", pa.float64()), + pa.field("y", pa.int32()), + ] + ) + return kd.sources.Source(schema, time_column_name="time", key_column_name="key") + + +def test_field_ref(source1) -> None: + field_ref_long = source1.col("x") + assert field_ref_long.data_type == pa.float64() + + +def test_field_ref_no_such_field(source1) -> None: + with pytest.raises(ValueError, match="Illegal field reference"): + # This raises a "NoSuchAttribute" error. + # We currently catch this in Python and don't do anything to + # suggest possible alternatives. + # + # TODO: We should either surface the Sparrow error which suggests + # possible field names, or improve the Python error. + source1.col("foo") + + +def test_field_ref_not_a_struct(source1) -> None: + with pytest.raises( + TypeError, match="Cannot access column 'x' of non-record type 'double'" + ): + source1.col("x").col("x") + + +def test_timestream_math(source1) -> None: + x = source1.col("x") + assert (x + 1).data_type == x.data_type + assert (1 + x).data_type == x.data_type + assert (x - 1).data_type == x.data_type + assert (1 - x).data_type == x.data_type + assert (x * 1).data_type == x.data_type + assert (1 * x).data_type == x.data_type + assert (1 / x).data_type == x.data_type + assert (1 + x).data_type == x.data_type + + assert (1 + x).neg().data_type == x.data_type + + +def test_timestream_comparison(source1) -> None: + x = source1.col("x") + + # Tests the various comparison operators. Even though Python doesn't have a + # `__rgt__` (reverse gt) dunder method, if the LHS doesn't support `gt` with + # the RHS it seems to try `rhs lt lhs`. + assert (x > 1).data_type == pa.bool_() + assert (1 > x).data_type == pa.bool_() + assert (x < 1).data_type == pa.bool_() + assert (1 < x).data_type == pa.bool_() + assert (x >= 1).data_type == pa.bool_() + assert (1 >= x).data_type == pa.bool_() + assert (x <= 1).data_type == pa.bool_() + assert (1 <= x).data_type == pa.bool_() + + # For `eq` and `ne` we only support timestream on the LHS since it is a method. + # We can't overload `__eq__` since that must take any RHS and must return `bool`. + assert x.eq(1).data_type == pa.bool_() + assert x.ne(1).data_type == pa.bool_() + + a = x > 1 + b = x.eq(1) + assert a.not_().data_type == pa.bool_() + assert a.and_(b).data_type == pa.bool_() + assert a.or_(b).data_type == pa.bool_() + + +def test_timestream_eq_warning(source1) -> None: + x = source1.col("x") + y = source1.col("y") + + with pytest.warns(UserWarning, match="Use 'eq'"): + x == y # noqa : B015 + + with pytest.warns(UserWarning, match="Use 'ne'"): + x != y # noqa : B015 + + +def test_timestream_arithmetic_types(source1) -> None: + x = source1.col("x") + assert (x.eq(1)).data_type == pa.bool_() + assert (x + 1).data_type == pa.float64() + assert (x + source1.col("y")).data_type == pa.float64() + + # TODO: This should raise a TypeError, but currently the Rust + # code always raises a ValueError, so everything comes out + # looking the same. 
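+ # For example, `x.eq(1)` yields a Timestream[bool], so adding the int32 column `y` to it should be rejected: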
+ with pytest.raises(ValueError) as e: + x.eq(1) + source1.col("y") + assert "Incompatible argument types" in str(e) + if sys.version_info >= (3, 11): + assert "Arg[0]: Timestream[bool]" in e.value.__notes__ + assert "Arg[1]: Timestream[int32]" in e.value.__notes__ + + +def test_timestream_preview(golden) -> None: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + source = kd.sources.CsvString( + content, time_column_name="time", key_column_name="key" + ) + + golden.jsonl(source.preview(limit=4)) + + +def test_timestream_run_non_record(golden) -> None: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + source = kd.sources.CsvString( + content, time_column_name="time", key_column_name="key" + ) + golden.jsonl(source.col("m")) + + +def test_timestream_cast(golden) -> None: + content = "\n".join( + [ + "time,key,m,n", + "1996-12-19T16:39:57,A,5,10", + "1996-12-19T16:39:58,B,24,3", + "1996-12-19T16:39:59,A,17,6", + "1996-12-19T16:40:00,A,,9", + "1996-12-19T16:40:01,A,12,", + "1996-12-19T16:40:02,A,,", + ] + ) + source = kd.sources.CsvString( + content, time_column_name="time", key_column_name="key" + ) + golden.jsonl(source.col("time").cast(pa.timestamp("ns"))) diff --git a/python/pytests/udf_test.py b/python/pytests/udf_test.py new file mode 100644 index 000000000..c60b1f6ee --- /dev/null +++ b/python/pytests/udf_test.py @@ -0,0 +1,26 @@ +import pandas as pd +import pyarrow as pa +from kaskada._ffi import call_udf +from kaskada.udf import Udf +from kaskada.udf import fenl_udf + + +@fenl_udf("add", "add(x: number, y: number) -> number") +def add(x: pd.Series, y: pd.Series) -> pd.Series: + return x + y + + +def test_numeric_udf_pure_python() -> None: + assert isinstance(add, Udf) + + x = pa.array([1, 12, 17, 23, 28], type=pa.int8()) + y = pa.array([1, 13, 18, 20, 4], type=pa.int8()) + result = add.run_pyarrow(pa.int8(), x, y) + assert result == pa.array([2, 25, 35, 43, 32], type=pa.int8()) + + +def test_numeric_udf_rust() -> None: + x = pa.array([1, 12, 17, 23, 28], type=pa.int8()) + y = pa.array([1, 13, 18, 20, 4], type=pa.int8()) + result = call_udf(add, pa.int8(), x, y) + assert result == pa.array([2, 25, 35, 43, 32], type=pa.int8()) diff --git a/python/pytests/union_test.py b/python/pytests/union_test.py new file mode 100644 index 000000000..060f05b1e --- /dev/null +++ b/python/pytests/union_test.py @@ -0,0 +1,22 @@ +import kaskada as kd + + +def test_union(golden) -> None: + source = kd.sources.PyList( + [ + {"time": "1996-12-19T16:39:57", "user": "A", "m": [5], "n": []}, + {"time": "1996-12-19T17:39:57", "user": "A", "m": [], "n": [5, 6]}, + {"time": "1996-12-19T18:39:57", "user": "A", "m": [None]}, + {"time": "1996-12-19T19:39:57", "user": "A", "m": [6, 7], "n": [6, 7, 8]}, + { + "time": "1996-12-19T19:39:57", + "user": "A", + "m": [6, 7, 8, 6], + "n": [9, 8, 10], + }, + ], + time_column_name="time", + key_column_name="user", + ) + + golden.jsonl(source.col("m").union(source.col("n"))) diff --git a/python/pytests/with_key_test.py b/python/pytests/with_key_test.py new file mode 100644 index 000000000..a51cf41b4 --- /dev/null +++ b/python/pytests/with_key_test.py @@ -0,0 +1,37 @@ +import kaskada as kd +import pytest 
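+# These tests re-key the stream by `new_key`; the goldens show `_key` taking the new key's value (or the literal).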
+ + +@pytest.fixture(scope="module") +def source() -> kd.sources.CsvString: + content = "\n".join( + [ + "time,key,m,new_key", + "1996-12-19T16:39:57,A,5,C", + "1996-12-19T16:39:58,B,24,D", + "1996-12-19T16:39:59,A,17,C", + "1996-12-19T16:40:00,A,9,C", + "1996-12-19T16:40:01,A,12,C", + "1996-12-19T16:40:02,A,,C", + ] + ) + return kd.sources.CsvString(content, time_column_name="time", key_column_name="key") + + +def test_with_key_literal(source, golden) -> None: + golden.jsonl(source.with_key("literal_key")) + + +def test_with_key_column(source, golden) -> None: + new_key = source.col("new_key") + golden.jsonl(source.with_key(new_key)) + + +def test_with_key_grouping(source, golden) -> None: + new_key = source.col("new_key") + grouping = "user" + golden.jsonl(source.with_key(new_key, grouping)) + + +def test_with_key_last(source, golden) -> None: + golden.jsonl(source.with_key(source.col("new_key")).last()) diff --git a/python/requirements-dev.txt b/python/requirements-dev.txt new file mode 100644 index 000000000..9ea7f14b6 --- /dev/null +++ b/python/requirements-dev.txt @@ -0,0 +1,3 @@ +pytest>=3.5.0 +pip>=21.3 +maturin>=0.12,<0.13 diff --git a/python/src/error.rs b/python/src/error.rs new file mode 100644 index 000000000..07ae89768 --- /dev/null +++ b/python/src/error.rs @@ -0,0 +1,95 @@ +use error_stack::Report; +use pyo3::exceptions::PyRuntimeError; +use pyo3::PyErr; + +#[derive(derive_more::Display, Debug)] +pub enum ErrorContext { + #[display(fmt = "error in kaskada Rust code")] + Ffi, + #[display(fmt = "error in kaskada Pyo3 or Python code")] + Python, + #[display(fmt = "result already collected")] + ResultAlreadyCollected, +} + +impl error_stack::Context for ErrorContext {} + +pub struct Error(error_stack::Report<ErrorContext>); + +pub type Result<T> = std::result::Result<T, Error>; + +trait UserErrorInfo: Sync + Send { + /// If this error is a user facing error, return it.
+ /// + /// When producing the Python error, the outer-most user facing error will + /// be reported as the message. + fn user_facing_cause(&self) -> Option<PyErr>; +} + +impl std::fmt::Debug for Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Debug::fmt(&self.0, f) + } +} + +impl std::fmt::Display for Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Debug::fmt(&self.0, f) + } +} + +impl std::error::Error for Error { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + self.0.as_error().source() + } + + #[cfg(nightly)] + fn provide<'a>(&'a self, demand: &mut Demand<'a>) { + self.0.frames().for_each(|frame| frame.provide(demand)); + } +} + +impl<C: error_stack::Context> From<error_stack::Report<C>> for Error { + fn from(value: error_stack::Report<C>) -> Self { + Error(value.change_context(ErrorContext::Ffi)) + } +} + +impl From<PyErr> for Error { + fn from(value: PyErr) -> Self { + Error(Report::from(value).change_context(ErrorContext::Python)) + } +} + +impl From<Error> for PyErr { + fn from(value: Error) -> Self { + tracing::error!("Reporting error from kaskada FFI: {value:#}"); + value + .0 + .frames() + .find_map(|f| { + f.downcast_ref::<&dyn UserErrorInfo>() + .and_then(|info| info.user_facing_cause()) + }) + .unwrap_or_else(|| PyRuntimeError::new_err(format!("{value:#}"))) + } +} + +pub(crate) trait IntoError { + type Result; + fn into_error(self) -> Self::Result; +} + +impl<E: Into<Error>> IntoError for E { + type Result = Error; + fn into_error(self) -> Self::Result { + self.into() + } +} + +impl<T, E: Into<Error>> IntoError for std::result::Result<T, E> { + type Result = Result<T>; + fn into_error(self) -> Self::Result { + self.map_err(|e| e.into()) + } +} diff --git a/python/src/execution.rs b/python/src/execution.rs new file mode 100644 index 000000000..51b663289 --- /dev/null +++ b/python/src/execution.rs @@ -0,0 +1,102 @@ +use std::sync::Arc; + +use arrow::pyarrow::ToPyArrow; +use futures::TryFutureExt; +use pyo3::prelude::*; +use sparrow_session::Execution as RustExecution; +use tokio::sync::{Mutex, MutexGuard, OwnedMutexGuard}; + +use crate::error::{ErrorContext, IntoError, Result}; + +/// Kaskada execution object. +#[pyclass] +#[derive(Clone)] +pub(crate) struct Execution { + execution: Arc<Mutex<Option<RustExecution>>>, +} + +impl Execution { + pub(crate) fn new(execution: RustExecution) -> Self { + Self { + execution: Arc::new(Mutex::new(Some(execution))), + } + } +} + +impl Execution { + /// Return a mutex-locked `RustExecution`. + /// + /// Error if the execution has already been completed (consumed). + fn execution(&self) -> Result<MutexGuard<'_, Option<RustExecution>>> { + let execution_opt = self.execution.blocking_lock(); + if execution_opt.is_none() { + return Err(error_stack::report!(ErrorContext::ResultAlreadyCollected).into()); + } + Ok(execution_opt) + } + + /// Same as `execution` but returning an `OwnedMutexGuard` which is `Send`.
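+ /// (An owned guard can be held across `.await` points, as in `next_pyarrow_async` below.)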
+ async fn owned_execution(&self) -> Result<OwnedMutexGuard<Option<RustExecution>>> { + let execution_opt = self.execution.clone().lock_owned().await; + if execution_opt.is_none() { + return Err(error_stack::report!(ErrorContext::ResultAlreadyCollected).into()); + } + Ok(execution_opt) + } + + fn take_execution(&self) -> Result<RustExecution> { + Ok(self.execution()?.take().unwrap()) + } +} + +#[pymethods] +impl Execution { + fn collect_pyarrow(&mut self, py: Python<'_>) -> Result<Vec<PyObject>> { + let execution = self.take_execution()?; + let batches = execution.collect_all_blocking()?; + let results = batches + .into_iter() + .map(|batch| batch.to_pyarrow(py)) + .collect::<PyResult<Vec<_>>>()?; + Ok(results) + } + + fn next_pyarrow(&mut self, py: Python<'_>) -> Result<Option<PyObject>> { + let mut execution = self.execution()?; + let batch = execution.as_mut().unwrap().next_blocking()?; + let result = match batch { + Some(batch) => Some(batch.to_pyarrow(py)?), + None => None, + }; + Ok(result) + } + + fn next_pyarrow_async<'py>(&self, py: Python<'py>) -> PyResult<&'py PyAny> { + let execution = self.clone(); + pyo3_asyncio::tokio::future_into_py(py, async move { + // We can't use `?` here because it attempts to acquire the GIL unexpectedly. + let next = execution + .owned_execution() + .map_err(|e| e.into_error()) + .and_then(|mut execution| async move { + execution.as_mut().unwrap().next().await.into_error() + }) + .await; + + Python::with_gil(|py| { + if let Some(batch) = next? { + Ok(batch.to_pyarrow(py)?) + } else { + Ok(py.None()) + } + }) + }) + } + + fn stop(&mut self) -> Result<()> { + self.execution()?.as_mut().unwrap().stop(); + Ok(()) + } +} diff --git a/python/src/expr.rs b/python/src/expr.rs new file mode 100644 index 000000000..0acc47464 --- /dev/null +++ b/python/src/expr.rs @@ -0,0 +1,150 @@ +use crate::error::Result; +use crate::execution::Execution; +use crate::session::Session; +use arrow::datatypes::DataType; +use arrow::pyarrow::{FromPyArrow, ToPyArrow}; +use pyo3::exceptions::{PyRuntimeError, PyValueError}; +use pyo3::prelude::*; +use sparrow_session::{Expr as RustExpr, Literal, Session as RustSession}; + +/// Kaskada expression node. +#[derive(Clone)] +#[pyclass(subclass)] +pub(crate) struct Expr { + pub rust_expr: RustExpr, + pub session: Session, +} + +#[pymethods] +impl Expr { + /// Create a new expression. + /// + /// This creates a new expression based on the `operation` and `args` provided. + #[staticmethod] + #[pyo3(signature = (session, operation, args))] + fn call(session: Session, operation: String, args: Vec<Expr>) -> PyResult<Self> { + if !args.iter().all(|e| e.session() == session) { + return Err(PyValueError::new_err( + "all arguments must be in the same session", + )); + } + + let mut rust_session = session.rust_session()?; + let args: Vec<_> = args.into_iter().map(|e| e.rust_expr).collect(); + // TODO: Support adding a UDF here. + let rust_expr = match rust_session.add_expr(&operation, args) { + Ok(node) => node, + Err(e) => { + // DO NOT SUBMIT: Better error handling. + return Err(PyValueError::new_err(e.to_string())); + } + }; + std::mem::drop(rust_session); + + Ok(Self { rust_expr, session }) + } + + #[staticmethod] + #[pyo3(signature = (session, value))] + fn literal(session: Session, value: Option<Arg>) -> PyResult<Self> { + let mut rust_session = session.rust_session()?; + + let rust_expr = match value { + None => rust_session + .add_literal(Literal::Null) + .map_err(|_| PyRuntimeError::new_err("unable to create null literal"))?, + Some(arg) => { + arg.into_ast_dfg_ref(&mut rust_session) + // DO NOT SUBMIT: Better error handling.
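+ // For now, any failure converting the literal is collapsed into this generic runtime error.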
+ .map_err(|_| PyRuntimeError::new_err("unable to create argument"))? + } + }; + std::mem::drop(rust_session); + Ok(Self { rust_expr, session }) + } + + #[pyo3(signature = (data_type))] + fn cast(&self, data_type: &PyAny) -> Result<Self> { + let data_type = DataType::from_pyarrow(data_type)?; + + let mut rust_session = self.session.rust_session()?; + let rust_expr = rust_session.add_cast(self.rust_expr.clone(), data_type)?; + std::mem::drop(rust_session); + + let session = self.session.clone(); + Ok(Self { rust_expr, session }) + } + + /// Return the session this expression is in. + fn session(&self) -> Session { + self.session.clone() + } + + fn execute(&self, options: Option<&PyAny>) -> Result<Execution> { + let session = self.session.rust_session()?; + let options = extract_options(options)?; + let execution = session.execute(&self.rust_expr, options)?; + Ok(Execution::new(execution)) + } + + /// Return the `pyarrow` type of the resulting expression. + fn data_type(&self, py: Python<'_>) -> Result<Option<PyObject>> { + match self.rust_expr.data_type() { + Some(t) => Ok(Some(t.to_pyarrow(py)?)), + _ => Ok(None), + } + } + + #[pyo3(signature = ())] + fn is_continuous(&self) -> bool { + self.rust_expr.is_continuous() + } + + fn grouping(&self) -> Option<String> { + self.rust_expr.grouping() + } +} + +#[derive(FromPyObject)] +enum Arg { + Expr(Expr), + LiteralBool(bool), + LiteralUInt(u64), + LiteralInt(i64), + LiteralFloat(f64), + LiteralString(String), +} + +impl Arg { + fn into_ast_dfg_ref( + self, + session: &mut RustSession, + ) -> error_stack::Result<RustExpr, sparrow_session::Error> { + match self { + Self::Expr(e) => Ok(e.rust_expr.clone()), + Self::LiteralBool(b) => session.add_literal(Literal::Bool(b)), + Self::LiteralUInt(n) => session.add_literal(Literal::UInt64(n)), + Self::LiteralInt(n) => session.add_literal(Literal::Int64(n)), + Self::LiteralFloat(n) => session.add_literal(Literal::Float64(n)), + Self::LiteralString(s) => session.add_literal(Literal::String(s)), + } + } +} + +fn extract_options(options: Option<&PyAny>) -> Result<sparrow_session::ExecutionOptions> { + match options { + None => Ok(sparrow_session::ExecutionOptions::default()), + Some(options) => { + let py = options.py(); + let row_limit = pyo3::intern!(py, "row_limit"); + let max_batch_size = pyo3::intern!(py, "max_batch_size"); + let materialize = pyo3::intern!(py, "materialize"); + + Ok(sparrow_session::ExecutionOptions { + row_limit: options.getattr(row_limit)?.extract()?, + max_batch_size: options.getattr(max_batch_size)?.extract()?, + materialize: options.getattr(materialize)?.extract()?, + }) + } + } +} diff --git a/python/src/lib.rs b/python/src/lib.rs new file mode 100644 index 000000000..957981d31 --- /dev/null +++ b/python/src/lib.rs @@ -0,0 +1,26 @@ +use pyo3::prelude::*; + +mod error; +mod execution; +mod expr; +mod session; +mod table; +mod udf; + +#[global_allocator] +static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; + +/// A Python module implemented in Rust. The name of this function must match +/// the `lib.name` setting in the `Cargo.toml`, else Python will not be able to +/// import the module.
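+/// On the Python side this module is imported as `kaskada._ffi` (matching the `pyo3(name = "_ffi")` attribute below).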
diff --git a/python/src/lib.rs b/python/src/lib.rs
new file mode 100644
index 000000000..957981d31
--- /dev/null
+++ b/python/src/lib.rs
@@ -0,0 +1,26 @@
+use pyo3::prelude::*;
+
+mod error;
+mod execution;
+mod expr;
+mod session;
+mod table;
+mod udf;
+
+#[global_allocator]
+static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
+
+/// A Python module implemented in Rust. The name of this function must match
+/// the `lib.name` setting in the `Cargo.toml`, else Python will not be able to
+/// import the module.
+#[pymodule]
+#[pyo3(name = "_ffi")]
+fn ffi(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
+    m.add_function(wrap_pyfunction!(udf::call_udf, m)?)?;
+    m.add_class::<execution::Execution>()?;
+    m.add_class::<expr::Expr>()?;
+    m.add_class::<session::Session>()?;
+    m.add_class::<table::Table>()?;
+
+    Ok(())
+}
diff --git a/python/src/session.rs b/python/src/session.rs
new file mode 100644
index 000000000..560468f51
--- /dev/null
+++ b/python/src/session.rs
@@ -0,0 +1,33 @@
+use std::sync::{Arc, Mutex, MutexGuard};
+
+use pyo3::prelude::*;
+use sparrow_session::Session as RustSession;
+
+/// Kaskada session object.
+#[derive(Clone)]
+#[pyclass]
+pub(crate) struct Session(Arc<Mutex<RustSession>>);
+
+impl PartialEq for Session {
+    fn eq(&self, other: &Self) -> bool {
+        Arc::ptr_eq(&self.0, &other.0)
+    }
+}
+
+impl Eq for Session {}
+
+impl Session {
+    pub(crate) fn rust_session(&self) -> PyResult<MutexGuard<'_, RustSession>> {
+        self.0
+            .lock()
+            .map_err(|e| pyo3::exceptions::PyRuntimeError::new_err(e.to_string()))
+    }
+}
+
+#[pymethods]
+impl Session {
+    #[new]
+    fn new() -> Self {
+        Self(Arc::new(Mutex::new(RustSession::default())))
+    }
+}
diff --git a/python/src/table.rs b/python/src/table.rs
new file mode 100644
index 000000000..0a34de936
--- /dev/null
+++ b/python/src/table.rs
@@ -0,0 +1,67 @@
+use std::sync::Arc;
+
+use arrow::datatypes::Schema;
+use arrow::pyarrow::{FromPyArrow, PyArrowType};
+use arrow::record_batch::RecordBatch;
+use pyo3::prelude::*;
+use sparrow_session::Table as RustTable;
+
+use crate::error::Result;
+use crate::expr::Expr;
+use crate::session::Session;
+
+#[pyclass(extends=Expr, subclass)]
+pub(crate) struct Table {
+    #[pyo3(get)]
+    name: String,
+    rust_table: RustTable,
+}
+
+#[pymethods]
+impl Table {
+    /// Create a new table.
+    #[new]
+    #[pyo3(signature = (session, name, time_column_name, key_column_name, schema, subsort_column_name, grouping_name, time_unit))]
+    #[allow(clippy::too_many_arguments)]
+    fn new(
+        session: Session,
+        name: String,
+        time_column_name: &str,
+        key_column_name: &str,
+        schema: PyArrowType<Schema>,
+        subsort_column_name: Option<&str>,
+        grouping_name: Option<&str>,
+        time_unit: Option<&str>,
+    ) -> Result<(Self, Expr)> {
+        let raw_schema = Arc::new(schema.0);
+
+        let rust_table = session.rust_session()?.add_table(
+            &name,
+            raw_schema,
+            time_column_name,
+            subsort_column_name,
+            key_column_name,
+            grouping_name,
+            time_unit,
+        )?;
+
+        let rust_expr = rust_table.expr.clone();
+        let table = Table { name, rust_table };
+        let expr = Expr { rust_expr, session };
+        Ok((table, expr))
+    }
+
+    /// Add PyArrow data to the given table.
+    ///
+    /// TODO: Support other kinds of data:
+    /// - pyarrow RecordBatchReader
+    /// - Parquet file URLs
+    /// - Python generators?
+    /// TODO: Error handling
+    fn add_pyarrow(&mut self, data: &PyAny) -> Result<()> {
+        let data = RecordBatch::from_pyarrow(data)?;
+        self.rust_table.add_data(data)?;
+        Ok(())
+    }
+}
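Because `Table` extends `Expr`, the constructed table can be queried directly, and `add_pyarrow` accepts anything `RecordBatch::from_pyarrow` understands, such as a `pyarrow.RecordBatch`. A minimal sketch (the schema and values are illustrative):

    # Minimal sketch; the columns mirror the time/key names a Table would be
    # created with, but the schema and data here are illustrative.
    import pyarrow as pa

    batch = pa.RecordBatch.from_pydict(
        {"time": [1, 2], "key": ["a", "b"], "m": [5.0, 17.0]}
    )
    # table.add_pyarrow(batch)  # `table` being a Table built through the FFI
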
diff --git a/python/src/udf.rs b/python/src/udf.rs
new file mode 100644
index 000000000..5557b4c80
--- /dev/null
+++ b/python/src/udf.rs
@@ -0,0 +1,32 @@
+use arrow::array::ArrayData;
+use arrow::datatypes::DataType;
+use arrow::pyarrow::{FromPyArrow, ToPyArrow};
+use pyo3::prelude::*;
+use pyo3::types::PyTuple;
+
+#[pyfunction]
+#[pyo3(signature = (udf, result_type, *args))]
+pub(super) fn call_udf<'py>(
+    py: Python<'py>,
+    udf: &'py PyAny,
+    result_type: &'py PyAny,
+    args: &'py PyTuple,
+) -> PyResult<&'py PyAny> {
+    let result_type = DataType::from_pyarrow(result_type)?;
+
+    // 1. Make sure we can convert each input to and from arrow arrays.
+    let mut udf_args = Vec::with_capacity(args.len() + 1);
+    udf_args.push(result_type.to_pyarrow(py)?);
+    for arg in args {
+        let array_data = ArrayData::from_pyarrow(arg)?;
+        let py_array: PyObject = array_data.to_pyarrow(py)?;
+        udf_args.push(py_array);
+    }
+    let args = PyTuple::new(py, udf_args);
+    let result = udf.call_method("run_pyarrow", args, None)?;
+
+    let array_data: ArrayData = ArrayData::from_pyarrow(result)?;
+    assert_eq!(array_data.data_type(), &result_type);
+
+    Ok(result)
+}
diff --git a/python/visualize.ipynb b/python/visualize.ipynb
new file mode 100644
index 000000000..d79cd316b
--- /dev/null
+++ b/python/visualize.ipynb
@@ -0,0 +1,71 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Configure reloading of loaded modules (useful for the visualization function)\n",
+    "%reload_ext autoreload\n",
+    "%autoreload 2"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import kaskada as kd\n",
+    "kd.init_session()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "data = \"\\n\".join(\n",
+    "    [\n",
+    "        \"time,key,m,n\",\n",
+    "        \"1996-12-19T16:39:57,A,5,10\",\n",
+    "        \"1997-01-19T16:39:58,B,24,3\",\n",
+    "        \"1997-03-19T16:39:59,A,17,6\",\n",
+    "        \"1997-04-18T16:40:00,A,,9\",\n",
+    "    ]\n",
+    "    )\n",
+    "source = kd.sources.CsvString(data, time_column_name=\"time\", key_column_name=\"key\")\n",
+    "\n",
+    "kd.plot.render(\n",
+    "    kd.plot.Plot(source.col(\"m\"), name=\"m\"),\n",
+    "    kd.plot.Plot(source.col(\"n\"), name=\"n\"),\n",
+    "    kd.plot.Plot(source.col(\"m\").sum(), name = \"sum_m\"),\n",
+    ")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "sparrow-py-AIxfaco5-py3.11",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.4"
+  },
+  "orig_nbformat": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/scripts/get_version.py b/scripts/get_version.py
new file mode 100644
index 000000000..e93856f6a
--- /dev/null
+++ b/scripts/get_version.py
@@ -0,0 +1,28 @@
+import tomlkit
+import argparse
+from typing import List
+
+def get_value_from_toml(file_path: str, toml_path: List[str]) -> str:
+    """Retrieve a value from a TOML file at the given path."""
+    with open(file_path, 'r') as f:
+        data = tomlkit.parse(f.read())
+
+    temp = data
+    for key in toml_path:
+        temp = temp[key]
+
+    return str(temp)  # Convert value to string in case it's a number or boolean
+
+def main():
+    parser = argparse.ArgumentParser(description='Retrieve value from a TOML file.')
+    parser.add_argument('file', type=str, help='Path to the TOML file.')
+    parser.add_argument('path', type=str, help='Path within the TOML file (e.g., package.version)')
+
+    args = parser.parse_args()
+
+    toml_path = args.path.split('.')
+    value = get_value_from_toml(args.file, toml_path)
+    print(f"{value}")
+
+if __name__ == "__main__":
+    main()
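`get_version.py` reads a single value by dotted TOML path; its counterpart below rewrites one or more such paths in place. Hypothetical invocations (the file paths and version number are illustrative):

    python scripts/get_version.py python/Cargo.toml package.version
    python scripts/set_versions.py 0.6.1 python/Cargo.toml:package.version python/pyproject.toml:tool.poetry.version
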
diff --git a/scripts/set_versions.py b/scripts/set_versions.py
new file mode 100644
index 000000000..2318c7ebc
--- /dev/null
+++ b/scripts/set_versions.py
@@ -0,0 +1,48 @@
+import tomlkit
+from tomlkit import dumps
+import argparse
+from collections import defaultdict
+from typing import Dict, List
+
+def update_version_in_data(data: Dict, version: str, toml_paths: List[str]) -> None:
+    """Update the version number in a data dictionary (parsed TOML) at multiple paths."""
+    for path in toml_paths:
+        temp = data
+        path = path.split('.')
+        for key in path[:-1]:
+            temp = temp[key]
+        temp[path[-1]] = version
+
+def main():
+    parser = argparse.ArgumentParser(description='Update version in TOML files.')
+    parser.add_argument('version', type=str, help='The version number to set.')
+    parser.add_argument('entries', nargs='+', type=str,
+                        help='TOML file and path, format: <file>:<toml.path> (e.g., config.toml:package.version)')
+
+    args = parser.parse_args()
+
+    # Dictionary to hold the paths for each file
+    file_paths_dict = defaultdict(list)
+
+    for entry in args.entries:
+        parts = entry.split(":")
+        if len(parts) != 2:
+            print(f"Invalid entry format: {entry}")
+            continue
+
+        file_path, toml_path_str = parts
+
+        file_paths_dict[file_path].append(toml_path_str)
+
+    # Update the files using the stored paths
+    for file_path, paths in file_paths_dict.items():
+        with open(file_path, 'r') as f:
+            data = tomlkit.parse(f.read())
+
+        update_version_in_data(data, args.version, paths)
+
+        with open(file_path, 'w') as f:
+            f.write(dumps(data))
+
+if __name__ == "__main__":
+    main()
diff --git a/testdata/parquet/data_with_list.parquet b/testdata/parquet/data_with_list.parquet
new file mode 100644
index 000000000..f4b0971f7
Binary files /dev/null and b/testdata/parquet/data_with_list.parquet differ
diff --git a/tests/integration/api/api_suite_test.go b/tests/integration/api/api_suite_test.go
index 49daad5b8..6b54f38dc 100644
--- a/tests/integration/api/api_suite_test.go
+++ b/tests/integration/api/api_suite_test.go
@@ -7,15 +7,12 @@ import (
 	"io"
 	"net/http"
 	"net/url"
-	"os"
 	"runtime"
 	"strings"
 	"testing"
 	"time"
 
-	"github.com/RedisAI/redisai-go/redisai"
 	"github.com/apache/pulsar-client-go/pulsar"
-	"github.com/gomodule/redigo/redis"
 	_ "github.com/lib/pq"
 	_ "github.com/mattn/go-sqlite3"
 
@@ -42,8 +39,6 @@ var (
 	minioRootUser     = flag.String("minio-root-user", "minio", "root username for connecting to minio")
 	minioRootPassword = flag.String("minio-root-password", "minio123", "root password for connecting to minio")
 	minioEndpoint     = flag.String("minio-endpoint", "127.0.0.1:9000", "endpoint for connecting to minio")
-	redisAIPort       = flag.Int("redis-ai-port", 6379, "Port to connect to the redis-ai integration instance. Note that this should be a specific instance for integration tests only, as the test cleanup will wipe any existing data from the redis instance.")
-	redisAIHost       = flag.String("redis-ai-host", "127.0.0.1", "Host to connect to the redis-ai integration instance. Note that this should be a specific instance for integration tests only, as the test cleanup will wipe any existing data from the redis instance.")
 	kaskadaHostname   = flag.String("hostname", "127.0.0.1", "hostname of Kaskada to connect")
 	kaskadaGrpcPort   = flag.Int("grpc-port", 50051, "Kaskada's gRPC port to connect")
 	kaskadaRestPort   = flag.Int("rest-port", 3365, "Kaskada's REST port to connect")
@@ -58,7 +53,7 @@ var (
 	grpcConfig helpers.HostConfig
 )
 
-// Before starting tests, delete all tables associated with the Integration clientID. Also completely wipes connected RedisAI instance.
+// Before starting tests, delete all tables associated with the Integration clientID.
var _ = BeforeSuite(func() { flag.Parse() @@ -112,23 +107,6 @@ func isARM() bool { return strings.Contains(runtime.GOARCH, "arm") } -func getRedisAIClient(db int) *redisai.Client { - pool := &redis.Pool{Dial: func() (redis.Conn, error) { - return redis.Dial("tcp", fmt.Sprintf("%s:%d", *redisAIHost, *redisAIPort), redis.DialDatabase(db)) - }} - - return redisai.Connect("", pool) -} - -func wipeRedisDatabase(db int) { - //Cleanup all existing data in RedisAI - redisAIClient := getRedisAIClient(db) - defer redisAIClient.Close() - redisAIClient.ActiveConnNX() - err := redisAIClient.ActiveConn.Send("FLUSHALL", "SYNC") - Expect(err).ShouldNot(HaveOccurred()) -} - func getRestRequest(ctx context.Context, method, endpoint string, jsonBody []byte) *http.Request { var ( req *http.Request @@ -205,7 +183,7 @@ func primitiveSchemaField(name string, primitiveType v1alpha.DataType_PrimitiveT } func getRemotePulsarHostname() string { - if os.Getenv("ENV") == "local-local" { + if helpers.TestsAreRunningLocally() { return "localhost" } else { return "pulsar" diff --git a/tests/integration/api/go.mod b/tests/integration/api/go.mod index c08758c77..a42cc57e4 100644 --- a/tests/integration/api/go.mod +++ b/tests/integration/api/go.mod @@ -10,7 +10,6 @@ replace github.com/kaskada-ai/kaskada/tests/integration/shared => ../shared require ( github.com/apache/pulsar-client-go v0.9.0 - github.com/gomodule/redigo v1.8.9 github.com/google/uuid v1.3.0 github.com/jt-nti/gproto v0.0.0-20210304092907-23e645af1351 github.com/mattn/go-sqlite3 v1.14.16 @@ -112,7 +111,6 @@ require ( require ( ariga.io/atlas v0.5.0 // indirect entgo.io/ent v0.11.1 // indirect - github.com/RedisAI/redisai-go v1.0.1 github.com/agext/levenshtein v1.2.1 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/c2fo/vfs v2.1.4+incompatible // indirect diff --git a/tests/integration/api/go.sum b/tests/integration/api/go.sum deleted file mode 100644 index dce526481..000000000 --- a/tests/integration/api/go.sum +++ /dev/null @@ -1,1466 +0,0 @@ -ariga.io/atlas v0.5.0 h1:9HZclkGI/xsW7IqKZLIMfnUJ0Nkgm1X1nysq4SMkKsg= -ariga.io/atlas v0.5.0/go.mod h1:ofVetkJqlaWle3mvYmaS2uyFGFcc7dSq436tmxa/Mzk= -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= 
-cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= -cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= -cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= -cloud.google.com/go v0.82.0/go.mod h1:vlKccHJGuFBFufnAnuB08dfEH9Y3H7dzDzRECFdC2TA= -cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= -cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= -cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= -cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= -cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= -cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= -cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= -cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= -cloud.google.com/go v0.107.0 h1:qkj22L7bgkl6vIeZDlOY2po43Mx/TIa2Wsa7VR+PEww= -cloud.google.com/go v0.107.0/go.mod h1:wpc2eNrD7hXUTy8EKS10jkxpZBjASrORK7goS+3YX2I= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= -cloud.google.com/go/compute v1.2.0/go.mod h1:xlogom/6gr8RJGBe7nT2eGsQYAFUbbv8dbC29qE3Xmw= -cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= -cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= -cloud.google.com/go/compute v1.18.0 h1:FEigFqoDbys2cvFkZ9Fjq4gnHBP55anJ0yQyau2f9oY= -cloud.google.com/go/compute v1.18.0/go.mod h1:1X7yHxec2Ga+Ss6jPyjxRxpu2uu7PLgsOVXvgU0yacs= -cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= -cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= -cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= -cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= -cloud.google.com/go/iam v0.1.1/go.mod h1:CKqrcnI/suGpybEHxZ7BMehL0oA4LpdyJdUlTl9jVMw= -cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= -cloud.google.com/go/iam v0.11.0 h1:kwCWfKwB6ePZoZnGLwrd3B6Ru/agoHANTUBWpVNIdnM= -cloud.google.com/go/iam v0.11.0/go.mod h1:9PiLDanza5D+oWFZiH1uG+RnRCfEGKoyl6yo4cgWZGY= -cloud.google.com/go/kms v1.1.0/go.mod 
h1:WdbppnCDMDpOvoYBMn1+gNmOeEoZYqAv+HeuKARGCXI= -cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= -cloud.google.com/go/longrunning v0.3.0 h1:NjljC+FYPV3uh5/OwWT6pVU+doBqMg2x/rZlE+CamDs= -cloud.google.com/go/monitoring v1.1.0/go.mod h1:L81pzz7HKn14QCMaCs6NTQkdBnE87TElyanS95vIcl4= -cloud.google.com/go/monitoring v1.4.0/go.mod h1:y6xnxfwI3hTFWOdkOaD7nfJVlwuC3/mS/5kvtT131p4= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/pubsub v1.19.0/go.mod h1:/O9kmSe9bb9KRnIAWkzmqhPjHo6LtzGOBYd/kr06XSs= -cloud.google.com/go/secretmanager v1.3.0/go.mod h1:+oLTkouyiYiabAQNugCeTS3PAArGiMJuBqvJnJsyH+U= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.21.0/go.mod h1:XmRlxkgPjlBONznT2dDUU/5XlpU2OjMnKuqnZI01LAA= -cloud.google.com/go/storage v1.27.0 h1:YOO045NZI9RKfCj1c5A/ZtuuENUc8OAW+gHdGnDgyMQ= -cloud.google.com/go/storage v1.27.0/go.mod h1:x9DOL8TK/ygDUMieqwfhdpQryTeEkhGKMi80i/iqR2s= -cloud.google.com/go/trace v1.0.0/go.mod h1:4iErSByzxkyHWzzlAj63/Gmjz0NH1ASqhJguHpGcr6A= -cloud.google.com/go/trace v1.2.0/go.mod h1:Wc8y/uYyOhPy12KEnXG9XGrvfMz5F5SrYecQlbW1rwM= -contrib.go.opencensus.io/exporter/aws v0.0.0-20200617204711-c478e41e60e9/go.mod h1:uu1P0UCM/6RbsMrgPa98ll8ZcHM858i/AD06a9aLRCA= -contrib.go.opencensus.io/exporter/stackdriver v0.13.10/go.mod h1:I5htMbyta491eUxufwwZPQdcKvvgzMB4O9ni41YnIM8= -contrib.go.opencensus.io/integrations/ocsql v0.1.7/go.mod h1:8DsSdjz3F+APR+0z0WkU1aRorQCFfRxvqjUUPMbF3fE= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -entgo.io/ent v0.11.1 h1:im67R+2W3Nee2bNS2YnoYz8oAF0Qz4AOlIvKRIAEISY= -entgo.io/ent v0.11.1/go.mod h1:X5b1YfMayrRTgKGO//8IqpL7XJx0uqdeReEkxNpXROA= -github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMbk2FiG/kXiLl8BRyzTWDw7gX/Hz7Dd5eDMs= -github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4= -github.com/99designs/keyring v1.2.1 h1:tYLp1ULvO7i3fI5vE21ReQuj99QFSs7lGm0xWyJo87o= -github.com/99designs/keyring v1.2.1/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA= -github.com/AthenZ/athenz v1.10.39 h1:mtwHTF/v62ewY2Z5KWhuZgVXftBej1/Tn80zx4DcawY= -github.com/AthenZ/athenz v1.10.39/go.mod h1:3Tg8HLsiQZp81BJY58JBeU2BR6B/H4/0MQGfCwhHNEA= -github.com/Azure/azure-amqp-common-go/v3 v3.2.1/go.mod h1:O6X1iYHP7s2x7NjUKsXVhkwWrQhxrd+d8/3rRadj4CI= -github.com/Azure/azure-amqp-common-go/v3 v3.2.2/go.mod h1:O6X1iYHP7s2x7NjUKsXVhkwWrQhxrd+d8/3rRadj4CI= -github.com/Azure/azure-pipeline-go v0.2.3 h1:7U9HBg1JFK3jHl5qmo4CTZKFTVgMwdFHMVtCdfBE21U= -github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k= -github.com/Azure/azure-sdk-for-go v51.1.0+incompatible/go.mod 
h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go v59.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go/sdk/azcore v0.19.0/go.mod h1:h6H6c8enJmmocHUbLiiGY6sx7f9i+X3m1CHdd5c6Rdw= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.0.0/go.mod h1:uGG2W01BaETf0Ozp+QxxKJdMBNRWPdstHG0Fmdwn1/U= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.1.1/go.mod h1:uGG2W01BaETf0Ozp+QxxKJdMBNRWPdstHG0Fmdwn1/U= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.11.0/go.mod h1:HcM1YX14R7CJcghJGOYCgdezslRSVzqwLf/q+4Y2r/0= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0/go.mod h1:+6sju8gk8FRmSajX3Oz4G5Gm7P+mbqE9FVaXXFYTkCM= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.1.0/go.mod h1:bhXu1AjYL+wutSL/kpSq6s7733q2Rb0yuot9Zgfqa/0= -github.com/Azure/azure-sdk-for-go/sdk/internal v0.7.0/go.mod h1:yqy467j36fJxcRV2TzfVZ1pCb5vxm4BtZPUdYWe/Xo8= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.0.0/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w= -github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.4.1/go.mod h1:eZ4g6GUvXiGulfIbbhh1Xr4XwUYaYaWMqzGD/284wCA= -github.com/Azure/azure-service-bus-go v0.11.5/go.mod h1:MI6ge2CuQWBVq+ly456MY7XqNLJip5LO1iSFodbNLbU= -github.com/Azure/azure-storage-blob-go v0.14.0/go.mod h1:SMqIBi+SuiQH32bvyjngEewEeXoPfKMgWlBDaYf6fck= -github.com/Azure/azure-storage-blob-go v0.15.0 h1:rXtgp8tN1p29GvpGgfJetavIG0V7OgcSXPpwp3tx6qk= -github.com/Azure/azure-storage-blob-go v0.15.0/go.mod h1:vbjsVbX0dlxnRc4FFMPsS9BsJWPcne7GB7onqlPvz58= -github.com/Azure/go-amqp v0.16.0/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= -github.com/Azure/go-amqp v0.16.4/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= -github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= -github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= -github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= -github.com/Azure/go-autorest/autorest v0.11.19/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= -github.com/Azure/go-autorest/autorest v0.11.22/go.mod h1:BAWYUWGPEtKPzjVkp0Q6an0MJcJDsoh5Z1BFAEFs4Xs= -github.com/Azure/go-autorest/autorest v0.11.28 h1:ndAExarwr5Y+GaHE6VCaY1kyS/HwwGGyuimVhWsHOEM= -github.com/Azure/go-autorest/autorest v0.11.28/go.mod h1:MrkzG3Y3AH668QyF9KRk5neJnGgmhQ6krbhR8Q5eMvA= -github.com/Azure/go-autorest/autorest/adal v0.9.5/go.mod h1:B7KF7jKIeC9Mct5spmyCB/A8CG/sEz1vwIRGv/bbw7A= -github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= -github.com/Azure/go-autorest/autorest/adal v0.9.14/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= -github.com/Azure/go-autorest/autorest/adal v0.9.17/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= -github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= -github.com/Azure/go-autorest/autorest/adal v0.9.21 h1:jjQnVFXPfekaqb8vIsv2G1lxshoW+oGv4MDlhRtnYZk= -github.com/Azure/go-autorest/autorest/adal v0.9.21/go.mod h1:zua7mBUaCc5YnSLKYgGJR/w5ePdMDA6H56upLsHzA9U= -github.com/Azure/go-autorest/autorest/azure/auth v0.5.9/go.mod h1:hg3/1yw0Bq87O3KvvnJoAh34/0zbP7SFizX/qN5JvjU= -github.com/Azure/go-autorest/autorest/azure/cli v0.4.2/go.mod h1:7qkJkT+j6b+hIpzMOwPChJhTqS8VbsqqgULzMNRugoM= -github.com/Azure/go-autorest/autorest/date v0.3.0 h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw= 
-github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= -github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= -github.com/Azure/go-autorest/autorest/mocks v0.4.2 h1:PGN4EDXnuQbojHbU0UWoNvmu9AGVwYHG9/fkDYhtAfw= -github.com/Azure/go-autorest/autorest/mocks v0.4.2/go.mod h1:Vy7OitM9Kei0i1Oj+LvyAWMXJHeKH1MVlzFugfVrmyU= -github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE= -github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= -github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg= -github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= -github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= -github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= -github.com/AzureAD/microsoft-authentication-library-for-go v0.4.0/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4= -github.com/AzureAD/microsoft-authentication-library-for-go v0.5.1/go.mod h1:Vt9sXTKwMyGcOxSmLDMnGPgqsUg7m8pe215qMLrDXw4= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= -github.com/DataDog/zstd v1.5.0 h1:+K/VEwIAaPcHiMtQvpLD4lqW7f0Gk3xdYZmI1hD+CXo= -github.com/DataDog/zstd v1.5.0/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= -github.com/GoogleCloudPlatform/cloudsql-proxy v1.29.0/go.mod h1:spvB9eLJH9dutlbPSRmHvSXXHOwGRyeXh1jVdquA2G8= -github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/RedisAI/redisai-go v1.0.1 h1:SUsds3o10WdI9e9g/vVRIGqh3i/FF5DhtomzEqLp/d8= -github.com/RedisAI/redisai-go v1.0.1/go.mod h1:FRf3yzUzpFsX6c+v5gBdRmTpR2+O/3lH0X50HT7qsbM= -github.com/actgardner/gogen-avro/v10 v10.1.0/go.mod h1:o+ybmVjEa27AAr35FRqU98DJu1fXES56uXniYFv4yDA= -github.com/actgardner/gogen-avro/v10 v10.2.1/go.mod h1:QUhjeHPchheYmMDni/Nx7VB0RsT/ee8YIgGY/xpEQgQ= -github.com/actgardner/gogen-avro/v9 v9.1.0/go.mod h1:nyTj6wPqDJoxM3qdnjcLv+EnMDSDFqE0qDpva2QRmKc= -github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= -github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/apache/arrow/go/arrow v0.0.0-20200730104253-651201b0f516 h1:byKBBF2CKWBjjA4J1ZL2JXttJULvWSl50LegTyRZ728= -github.com/apache/arrow/go/arrow 
v0.0.0-20200730104253-651201b0f516/go.mod h1:QNYViu/X0HXDHw7m3KXzWSVXIbfUvJqBFe6Gj8/pYA0= -github.com/apache/pulsar-client-go v0.9.0 h1:L5jvGFXJm0JNA/PgUiJctTVHHttCe4wIEFDv4vojiQM= -github.com/apache/pulsar-client-go v0.9.0/go.mod h1:fSAcBipgz4KQ/VgwZEJtQ71cCXMKm8ezznstrozrngw= -github.com/apache/thrift v0.0.0-20181112125854-24918abba929/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= -github.com/apache/thrift v0.14.2 h1:hY4rAyg7Eqbb27GB6gkhUKrRAuc8xRjlNtJq+LseKeY= -github.com/apache/thrift v0.14.2/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= -github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= -github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= -github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= -github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= -github.com/ardielle/ardielle-go v1.5.2 h1:TilHTpHIQJ27R1Tl/iITBzMwiUGSlVfiVhwDNGM3Zj4= -github.com/ardielle/ardielle-go v1.5.2/go.mod h1:I4hy1n795cUhaVt/ojz83SNVCYIGsAFAONtv2Dr7HUI= -github.com/ardielle/ardielle-tools v1.5.4/go.mod h1:oZN+JRMnqGiIhrzkRN9l26Cej9dEx4jeNG6A+AdkShk= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= -github.com/aws/aws-sdk-go v1.30.19/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= -github.com/aws/aws-sdk-go v1.32.6/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= -github.com/aws/aws-sdk-go v1.37.0/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= -github.com/aws/aws-sdk-go v1.43.31/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= -github.com/aws/aws-sdk-go v1.44.122 h1:p6mw01WBaNpbdP2xrisz5tIkcNwzj/HysobNoaAHjgo= -github.com/aws/aws-sdk-go v1.44.122/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= -github.com/aws/aws-sdk-go-v2 v1.16.2/go.mod h1:ytwTPBG6fXTZLxxeeCCWj2/EMYp/xDUgX+OET6TLNNU= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.1/go.mod h1:n8Bs1ElDD2wJ9kCRTczA83gYbBmjSwZp3umc6zF4EeM= -github.com/aws/aws-sdk-go-v2/config v1.15.3/go.mod h1:9YL3v07Xc/ohTsxFXzan9ZpFpdTOFl4X65BAKYaz8jg= -github.com/aws/aws-sdk-go-v2/credentials v1.11.2/go.mod h1:j8YsY9TXTm31k4eFhspiQicfXPLZ0gYXA50i4gxPE8g= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.12.3/go.mod h1:uk1vhHHERfSVCUnqSqz8O48LBYDSC+k6brng09jcMOk= -github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.3/go.mod h1:0dHuD2HZZSiwfJSy1FO5bX1hQ1TxVV1QXXjpn3XUE44= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.9/go.mod h1:AnVH5pvai0pAF4lXRq0bmhbes1u9R8wTE+g+183bZNM= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.3/go.mod h1:ssOhaLpRlh88H3UmEcsBoVKq309quMvm3Ds8e9d4eJM= -github.com/aws/aws-sdk-go-v2/internal/ini v1.3.10/go.mod h1:8DcYQcz0+ZJaSxANlHIsbbi6S+zMwjwdDqwW3r9AzaE= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.1/go.mod h1:GeUru+8VzrTXV/83XyMJ80KpH8xO89VPoUileyNQ+tc= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.3/go.mod h1:Seb8KNmD6kVTjwRjVEgOT5hPin6sq+v4C2ycJQDwuH8= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.3/go.mod 
h1:wlY6SVjuwvh3TVRpTqdy4I1JpBFLX4UGeKZdWntaocw= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.3/go.mod h1:Bm/v2IaN6rZ+Op7zX+bOUMdL4fsrYZiD0dsjLhNKwZc= -github.com/aws/aws-sdk-go-v2/service/kms v1.16.3/go.mod h1:QuiHPBqlOFCi4LqdSskYYAWpQlx3PKmohy+rE2F+o5g= -github.com/aws/aws-sdk-go-v2/service/s3 v1.26.3/go.mod h1:g1qvDuRsJY+XghsV6zg00Z4KJ7DtFFCx8fJD2a491Ak= -github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.15.4/go.mod h1:PJc8s+lxyU8rrre0/4a0pn2wgwiDvOEzoOjcJUBr67o= -github.com/aws/aws-sdk-go-v2/service/sns v1.17.4/go.mod h1:kElt+uCcXxcqFyc+bQqZPFD9DME/eC6oHBXvFzQ9Bcw= -github.com/aws/aws-sdk-go-v2/service/sqs v1.18.3/go.mod h1:skmQo0UPvsjsuYYSYMVmrPc1HWCbHUJyrCEp+ZaLzqM= -github.com/aws/aws-sdk-go-v2/service/ssm v1.24.1/go.mod h1:NR/xoKjdbRJ+qx0pMR4mI+N/H1I1ynHwXnO6FowXJc0= -github.com/aws/aws-sdk-go-v2/service/sso v1.11.3/go.mod h1:7UQ/e69kU7LDPtY40OyoHYgRmgfGM4mgsLYtcObdveU= -github.com/aws/aws-sdk-go-v2/service/sts v1.16.3/go.mod h1:bfBj0iVmsUyUg4weDB4NxktD9rDGeKSVWnjTnwbx9b8= -github.com/aws/smithy-go v1.11.2/go.mod h1:3xHYmszWVx2c0kIwQeEVf9uSm4fYZt67FBJnwub1bgM= -github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= -github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= -github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= -github.com/bmizerany/perks v0.0.0-20141205001514-d9a9656a3a4b/go.mod h1:ac9efd0D1fsDb3EJvhqgXRbFx7bs2wqZ10HQPeU8U/Q= -github.com/c2fo/vfs v2.1.4+incompatible h1:vFvWpCkDESKR7DijQm0s0SvVfB8uOdsGE9e1NUdz9WI= -github.com/c2fo/vfs v2.1.4+incompatible/go.mod h1:AyCojqeh1jPGpx4TToydf00RjrsY84u+IJCehY54XzI= -github.com/c2fo/vfs/v6 v6.6.0 h1:R8cX3J3TeN44A3LY7q3gPw48LWN3ibR5DMPeIxg+8cM= -github.com/c2fo/vfs/v6 v6.6.0/go.mod h1:gW7r6Iq2dFtEdXgLRxXi2vzyVsKf7WUJl3IOVaRh6NY= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= 
-github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= -github.com/colinmarc/hdfs/v2 v2.1.1/go.mod h1:M3x+k8UKKmxtFu++uAZ0OtDU8jR3jnaZIAc6yK4Ue0c= -github.com/confluentinc/confluent-kafka-go v1.9.2 h1:gV/GxhMBUb03tFWkN+7kdhg+zf+QUM+wVkI9zwh770Q= -github.com/confluentinc/confluent-kafka-go v1.9.2/go.mod h1:ptXNqsuDfYbAE/LBW6pnwWZElUoWxHoV8E43DCrliyo= -github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/danieljoos/wincred v1.1.2 h1:QLdCxFs1/Yl4zduvBdcHB8goaYk9RARS2SgLLRuAyr0= -github.com/danieljoos/wincred v1.1.2/go.mod h1:GijpziifJoIBfYh+S7BbkdUTU4LfM+QnGqR5Vl2tAx0= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/denisenkom/go-mssqldb v0.12.0/go.mod h1:iiK0YP1ZeepvmBQk/QpLEhhTNJgfzrpArPY/aFvc9yU= -github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY= -github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= -github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= -github.com/dimfeld/httptreemux v5.0.1+incompatible h1:Qj3gVcDNoOthBAqftuD596rm4wg/adLLz5xh5CmpiCA= -github.com/dimfeld/httptreemux v5.0.1+incompatible/go.mod h1:rbUlSV+CCpv/SuqUTP/8Bk2O3LyUV436/yaRGkhP6Z0= -github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= -github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= -github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= -github.com/dvsekhvalnov/jose2go v1.5.0 h1:3j8ya4Z4kMCwT5nXIKFSV84YS+HdqSSO0VsTQxaLAeM= -github.com/dvsekhvalnov/jose2go v1.5.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU= 
-github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= -github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/envoyproxy/protoc-gen-validate v0.9.1 h1:PS7VIOgmSVhWUEeZwTe7z7zouA22Cr590PzXKbZHOVY= -github.com/envoyproxy/protoc-gen-validate v0.9.1/go.mod h1:OKNgG7TCp5pF4d6XftA0++PMirau2/yoOwVac3AbF2w= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= -github.com/frankban/quicktest v1.2.2/go.mod h1:Qh/WofXFeiAFII1aEBu529AtJo6Zg2VHscnEsbBnJ20= -github.com/frankban/quicktest v1.7.2/go.mod h1:jaStnuzAqU1AJdCO0l53JDCJrVDKcS03DbaAcR7Ks/o= -github.com/frankban/quicktest v1.10.0/go.mod h1:ui7WezCLWMWxVWr1GETZY3smRy0G4KWq9vcPtJmFl7Y= -github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.5.1 h1:mZcQUHVQUQWoPXXtuf9yuEXKudkV2sx1E06UadKWpgI= -github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= -github.com/gin-gonic/gin v1.7.3/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-kit/log v0.2.0/go.mod 
h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= -github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= -github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= -github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4= -github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4= -github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= -github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= -github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= -github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= -github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= -github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= -github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= -github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= -github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= -github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= -github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= -github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= -github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= -github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= -github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 h1:ZpnhV/YsD2/4cESfV5+Hoeu/iUR3ruzNvZ+yQfO03a0= -github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= -github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang-jwt/jwt v3.2.1+incompatible h1:73Z+4BJcrTC+KczS6WvTPvRGOp1WmfEP4Q1lOd9Z/+c= -github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= -github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= -github.com/golang-jwt/jwt/v4 v4.2.0/go.mod 
h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= -github.com/golang-jwt/jwt/v4 v4.4.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-jwt/jwt/v4 v4.4.2 h1:rcc4lwaZgFMCZ5jxF9ABolDcIHdBytAFgqFPbSJQAYs= -github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= -github.com/golang-sql/sqlexp v0.0.0-20170517235910-f1bb20e5a188/go.mod h1:vXjM/+wXQnTPR4KqTKDgJukSZ6amVRtWMPEjE6sQoK8= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.0.0 h1:nfP3RFugxnNRyKgeWd4oI1nYvXpxrx8ck8ZrcizshdQ= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= -github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= -github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= 
-github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.3 h1:fHPg5GQYlCeLIPB9BZqMVR5nR9A+IM5zcgeTdjMYmLA= -github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= -github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/gomodule/redigo v1.8.9 h1:Sl3u+2BI/kk+VEatbj0scLdrFhjPmbxOc1myhDP41ws= -github.com/gomodule/redigo v1.8.9/go.mod h1:7ArFNvsTjH8GMMzB4uy1snslv2BwmginuMs06a1uzZE= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/flatbuffers v1.11.0 h1:O7CEyB8Cb3/DmtxODGtLHcEvpr81Jm5qLg/hsHnxA2A= -github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.2.1-0.20190312032427-6f77996f0c42/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-replayers/grpcreplay v1.1.0/go.mod h1:qzAvJ8/wi57zq7gWqaE6AwLM6miiXUQwP1S+I9icmhk= -github.com/google/go-replayers/httpreplay v1.1.1/go.mod h1:gN9GeLIs7l6NUoVaSSnv2RiqK1NiwAmD0MrKeC9IIks= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE= -github.com/google/martian 
[go.sum diff: several hundred deleted dependency-checksum entries, one "-module version[/go.mod] h1:…=" line each, spanning github.com/google/martian through golang.org/x/tools; line structure lost in extraction]
-golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools 
v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200505023115-26f46d2f7ef8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201022035929-9cf592e881e9/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.7.0 h1:W4OVu8VVOaIO0yzWMNdepAulS7YfoS3Zabrm8DOXXU4= -golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= -golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= -golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= -google.golang.org/api v0.4.0/go.mod 
h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= -google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= -google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= -google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= -google.golang.org/api v0.46.0/go.mod h1:ceL4oozhkAiTID8XMmJBsIxID/9wMXJVVFXPg4ylg3I= -google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= -google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= -google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= -google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= -google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= -google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= -google.golang.org/api v0.58.0/go.mod h1:cAbP2FsxoGVNwtgNAmmn3y5G1TWAiVYRmg4yku3lv+E= -google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= -google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= -google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= -google.golang.org/api v0.64.0/go.mod h1:931CdxA8Rm4t6zqTFGSsgwbAEZ2+GMYurbndwSimebM= -google.golang.org/api v0.66.0/go.mod h1:I1dmXYpX7HGwz/ejRxwQp2qj5bFAz93HiCU1C1oYd9M= -google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= -google.golang.org/api v0.68.0/go.mod h1:sOM8pTpwgflXRhz+oC8H2Dr+UcbMqkPPWNJo88Q7TH8= -google.golang.org/api v0.69.0/go.mod h1:boanBiw+h5c3s+tBPgEzLDRHfFLWV0qXxRHz3ws7C80= -google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= -google.golang.org/api v0.71.0/go.mod 
h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= -google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= -google.golang.org/api v0.110.0 h1:l+rh0KYUooe9JGbGVx71tbFo4SMbMTXK3I3ia2QSEeU= -google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto 
v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= -google.golang.org/genproto v0.0.0-20210429181445-86c259c2b4ab/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210517163617-5e0236093d7a/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= -google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= -google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto 
v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= -google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= -google.golang.org/genproto v0.0.0-20210921142501-181ce0d877f6/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211018162055-cf77aa76bad2/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220204002441-d6cc3cc0770e/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= -google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= -google.golang.org/genproto 
v0.0.0-20220401170504-314d38edb7de/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= -google.golang.org/genproto v0.0.0-20220503193339-ba3ae3f07e29/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= -google.golang.org/genproto v0.0.0-20230221151758-ace64dc21148 h1:muK+gVBJBfFb4SejshDBlN2/UgxCCOKH9Y34ljqEGOc= -google.golang.org/genproto v0.0.0-20230221151758-ace64dc21148/go.mod h1:3Dl5ZL0q0isWJt+FVcfpQyirqemEuLAK/iFvg1UP1Hw= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= -google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= -google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc v1.53.0 h1:LAv2ds7cmFV/XTS3XG1NneeENYrXGmorPxsBbptIjNc= -google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= -google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod 
h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= -google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= -gopkg.in/avro.v0 v0.0.0-20171217001914-a730b5802183/go.mod h1:FvqrFXt+jCsyQibeRv4xxEJBL5iG2DDW5aeJwzDiq4A= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/confluentinc/confluent-kafka-go.v1 v1.8.2 h1:QAgN6OC0o7dwvyz+HML6GYm+0Pk54O91+oxGqJ/5z8I= -gopkg.in/confluentinc/confluent-kafka-go.v1 v1.8.2/go.mod h1:ZdI3yfYmdNSLQPNCpO1y00EHyWaHG5EnQEyL/ntAegY= -gopkg.in/errgo.v1 v1.0.0/go.mod h1:CxwszS/Xz1C49Ucd2i6Zil5UToP1EmyrFhKaMVbg1mk= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/httprequest.v1 v1.2.1/go.mod h1:x2Otw96yda5+8+6ZeWwHIJTFkEHWP/qP8pJOzqEtWPM= -gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= -gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.66.6/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= -gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo= -gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q= -gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4= -gopkg.in/jcmturner/gokrb5.v7 v7.3.0/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM= -gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod 
-gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
-gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
-gopkg.in/retry.v1 v1.0.3/go.mod h1:FJkXmWiMaAo7xB+xhvDF59zhfjDWyzmyAxiT4dB688g=
-gopkg.in/square/go-jose.v2 v2.4.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
-gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
-gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
-gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
-gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
-honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
-honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
-nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0=
-rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
-rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
-rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
"github.com/onsi/gomega/gstruct" "google.golang.org/grpc" "google.golang.org/grpc/metadata" - "google.golang.org/protobuf/types/known/wrapperspb" v1alpha "github.com/kaskada-ai/kaskada/gen/proto/go/kaskada/kaskada/v1alpha" helpers "github.com/kaskada-ai/kaskada/tests/integration/shared/helpers" . "github.com/kaskada-ai/kaskada/tests/integration/shared/matchers" ) -var _ = PDescribe("Graceful Shutdown test", Ordered, Label("redis"), Label("redis-ai"), func() { +var _ = Describe("Graceful Shutdown test", Ordered, Label("docker"), func() { var ( ctx context.Context cancel context.CancelFunc conn *grpc.ClientConn - key1 string - key2 string - redisAIClient *redisai.Client materializationClient v1alpha.MaterializationServiceClient tableClient v1alpha.TableServiceClient queryClient v1alpha.QueryServiceClient + outputURI string table *v1alpha.Table + tableName string + materializationName string ) + tableName = "graceful_shutdown_table" + materializationName = "graceful_shutdown_mat" + query := ` { -time: transactions.transaction_time, -key: transactions.id, -max_price: transactions.price | max(), -min_spent_in_single_transaction: min(transactions.price * transactions.quantity) -max_spent_in_single_transaction: max(transactions.price * transactions.quantity) +time: graceful_shutdown_table.transaction_time, +key: graceful_shutdown_table.id, +max_price: graceful_shutdown_table.price | max(), +min_spent_in_single_transaction: min(graceful_shutdown_table.price * graceful_shutdown_table.quantity), +max_spent_in_single_transaction: max(graceful_shutdown_table.price * graceful_shutdown_table.quantity) }` - redisDb := 4 kaskadaIsDown := false terminateKaskada := func() { + defer GinkgoRecover() + cmd := exec.Command("docker", "kill", "-s", "SIGTERM", "kaskada") err := cmd.Run() Expect(err).ShouldNot(HaveOccurred(), "Unable to terminate kaskada") @@ -77,37 +79,39 @@ max_spent_in_single_transaction: max(transactions.price * transactions.quantity) _, err := tableClient.ListTables(ctx, &v1alpha.ListTablesRequest{}) g.Expect(err).ShouldNot(HaveOccurred()) + + materializationClient = v1alpha.NewMaterializationServiceClient(conn) + queryClient = v1alpha.NewQueryServiceClient(conn) }, "30s", "1s").Should(Succeed()) kaskadaIsDown = false - - // get a grpc client for the materialization & compute services - materializationClient = v1alpha.NewMaterializationServiceClient(conn) - queryClient = v1alpha.NewQueryServiceClient(conn) } BeforeAll(func() { - // get a redis connections for verifying results - redisAIClient = getRedisAIClient(redisDb) + if helpers.TestsAreRunningLocally() { + Skip("tests running locally, skipping gracefull shutdown test") + } - wipeRedisDatabase(redisDb) + // define the output path and make sure it is empty + outputURI = fmt.Sprintf("file:///data/output/%s", materializationName) - // declare the keys we are testing for - key1 = "Symdt3HKIYEFyzRCgdQl2/OKVBzjl7aO1XcKd7o70wM=" - key2 = "c5obkiyX5gof2EdzWlYbXZ98xfu+cpjxxvANgTfRNzM=" + //get connection to wren + ctx, cancel, conn = grpcConfig.GetContextCancelConnection(20) + ctx = metadata.AppendToOutgoingContext(ctx, "client-id", *integrationClientID) + + tableClient = v1alpha.NewTableServiceClient(conn) + materializationClient = v1alpha.NewMaterializationServiceClient(conn) + queryClient = v1alpha.NewQueryServiceClient(conn) // delete the table and materialization if not cleaned up in the previous run - tableClient.DeleteTable(ctx, &v1alpha.DeleteTableRequest{TableName: "transactions"}) - materializationClient.DeleteMaterialization(ctx, 
&v1alpha.DeleteMaterializationRequest{MaterializationName: "transaction_details"}) + tableClient.DeleteTable(ctx, &v1alpha.DeleteTableRequest{TableName: tableName}) + materializationClient.DeleteMaterialization(ctx, &v1alpha.DeleteMaterializationRequest{MaterializationName: materializationName}) // create a table table = &v1alpha.Table{ - TableName: "transactions", + TableName: tableName, TimeColumnName: "transaction_time", EntityKeyColumnName: "id", - SubsortColumnName: &wrapperspb.StringValue{ - Value: "idx", - }, } _, err := tableClient.CreateTable(ctx, &v1alpha.CreateTableRequest{Table: table}) Expect(err).ShouldNot(HaveOccurredGrpc()) @@ -117,12 +121,15 @@ max_spent_in_single_transaction: max(transactions.price * transactions.quantity) }) AfterAll(func() { + if helpers.TestsAreRunningLocally() { + Skip("tests running locally, skipping gracefull shutdown test") + } + // clean up items created - materializationClient.DeleteMaterialization(ctx, &v1alpha.DeleteMaterializationRequest{MaterializationName: "transaction_details"}) - // this materialization might not have been created if test had an issue, so we don't check error here - _, err := tableClient.DeleteTable(ctx, &v1alpha.DeleteTableRequest{TableName: "transactions"}) + _, err := tableClient.DeleteTable(ctx, &v1alpha.DeleteTableRequest{TableName: tableName}) Expect(err).ShouldNot(HaveOccurred()) + cancel() conn.Close() }) @@ -134,17 +141,19 @@ max_spent_in_single_transaction: max(transactions.price * transactions.quantity) Context("When the table schema is created correctly", func() { Describe("Start a query, and then send a termination signal to Kaskada", func() { It("should return query results before exiting", func() { - go terminateKaskada() - destination := &v1alpha.Destination_ObjectStore{ - ObjectStore: &v1alpha.ObjectStoreDestination{ - FileType: v1alpha.FileType_FILE_TYPE_PARQUET, + destination := &v1alpha.Destination{ + Destination: &v1alpha.Destination_ObjectStore{ + ObjectStore: &v1alpha.ObjectStoreDestination{ + FileType: v1alpha.FileType_FILE_TYPE_PARQUET, + }, }, } + go terminateKaskada() stream, err := queryClient.CreateQuery(ctx, &v1alpha.CreateQueryRequest{ Query: &v1alpha.Query{ Expression: query, - Destination: &v1alpha.Destination{Destination: destination}, + Destination: destination, ResultBehavior: v1alpha.Query_RESULT_BEHAVIOR_ALL_RESULTS, }, QueryOptions: &v1alpha.QueryOptions{ @@ -164,20 +173,7 @@ max_spent_in_single_transaction: max(transactions.price * transactions.quantity) resultsUrl := res.GetDestination().GetObjectStore().GetOutputPaths().Paths[0] results := helpers.DownloadParquet(resultsUrl) - Expect(len(results)).Should(Equal(100000)) - Expect(results).Should(ContainElement(MatchFields(IgnoreExtras, Fields{ - "Time": PointTo(BeEquivalentTo(20150106)), - "Key": PointTo(Equal(key1)), - "Max_list_price": PointTo(BeEquivalentTo(149)), - "Min_paid": PointTo(BeEquivalentTo(149)), - }))) - - Expect(results).Should(ContainElement(MatchFields(IgnoreExtras, Fields{ - "Time": PointTo(BeEquivalentTo(20150104)), - "Key": PointTo(Equal(key2)), - "Max_list_price": PointTo(BeEquivalentTo(149)), - "Min_paid": PointTo(BeEquivalentTo(149)), - }))) + Expect(len(results)).Should(Equal(50000)) }) }) @@ -185,40 +181,32 @@ max_spent_in_single_transaction: max(transactions.price * transactions.quantity) It("create the materialzation without error", func() { go terminateKaskada() + destination := &v1alpha.Destination{ + Destination: &v1alpha.Destination_ObjectStore{ + ObjectStore: &v1alpha.ObjectStoreDestination{ + 
FileType: v1alpha.FileType_FILE_TYPE_PARQUET, + OutputPrefixUri: outputURI, + }, + }, + } + res, err := materializationClient.CreateMaterialization(ctx, &v1alpha.CreateMaterializationRequest{ Materialization: &v1alpha.Materialization{ - MaterializationName: "transaction_details", + MaterializationName: materializationName, Expression: query, - Destination: &v1alpha.Destination{ - Destination: &v1alpha.Destination_Redis{ - Redis: &v1alpha.RedisDestination{ - HostName: "redis", - Port: 6379, - DatabaseNumber: int32(redisDb), - }, - }, - }, + Destination: destination, }, }) Expect(err).ShouldNot(HaveOccurredGrpc()) Expect(res).ShouldNot(BeNil()) }) - It("Should upload results to redis before terminating", func() { + It("Should output results to a file before terminating", func() { Eventually(func(g Gomega) { - dataType, shape, values, err := redisAIClient.TensorGetValues(key1) - g.Expect(err).ShouldNot(HaveOccurred()) - g.Expect(dataType).Should(Equal("INT64")) - g.Expect(shape).Should(Equal([]int64{1, 3})) - g.Expect(values).Should(Equal([]int64{20150106, 149, 149})) - }, "30s", "1s").Should(Succeed()) + filePaths := helpers.EventuallyListOutputFiles(materializationName + "/0", g) - Eventually(func(g Gomega) { - dataType, shape, values, err := redisAIClient.TensorGetValues(key2) - g.Expect(err).ShouldNot(HaveOccurred()) - g.Expect(dataType).Should(Equal("INT64")) - g.Expect(shape).Should(Equal([]int64{1, 3})) - g.Expect(values).Should(Equal([]int64{20150104, 149, 149})) + results := helpers.DownloadParquet(filePaths[0]) + g.Expect(len(results)).Should(Equal(50000)) }, "30s", "1s").Should(Succeed()) }) }) @@ -230,23 +218,21 @@ max_spent_in_single_transaction: max(transactions.price * transactions.quantity) helpers.LoadTestFileIntoTable(ctx, conn, table, "transactions/transactions_part2.parquet") }) - It("Should upload new results to redis before terminating", func() { + It("Should output results to a file before terminating", func() { Eventually(func(g Gomega) { - dataType, shape, values, err := redisAIClient.TensorGetValues(key1) - g.Expect(err).ShouldNot(HaveOccurred()) - g.Expect(dataType).Should(Equal("INT64")) - g.Expect(shape).Should(Equal([]int64{1, 3})) - g.Expect(values).Should(Equal([]int64{20150109, 149, 100})) - }, "30s", "1s").Should(Succeed()) + filePaths := helpers.EventuallyListOutputFiles(materializationName + "/1", g) - Eventually(func(g Gomega) { - dataType, shape, values, err := redisAIClient.TensorGetValues(key2) - g.Expect(err).ShouldNot(HaveOccurred()) - g.Expect(dataType).Should(Equal("INT64")) - g.Expect(shape).Should(Equal([]int64{1, 3})) - g.Expect(values).Should(Equal([]int64{20150111, 149, 149})) + results := helpers.DownloadParquet(filePaths[0]) + g.Expect(len(results)).Should(Equal(10000)) }, "30s", "1s").Should(Succeed()) }) }) + + Describe("Cleeanup the materialization used in the test", func() { + It("Should work without error", func() { + _, err := materializationClient.DeleteMaterialization(ctx, &v1alpha.DeleteMaterializationRequest{MaterializationName: materializationName}) + Expect(err).ShouldNot(HaveOccurred()) + }) + }) }) }) diff --git a/tests/integration/api/mat_pulsar_to_obj_store_test.go b/tests/integration/api/mat_pulsar_to_obj_store_test.go index db33754aa..b77755109 100644 --- a/tests/integration/api/mat_pulsar_to_obj_store_test.go +++ b/tests/integration/api/mat_pulsar_to_obj_store_test.go @@ -174,7 +174,7 @@ var _ = Describe("Materialization from Pulsar to ObjectStore", Ordered, Label("p results := helpers.GetCSV(outputPath + firstFileName) 
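Aside: the diff above ports the graceful-shutdown test from a RedisAI destination to a file-backed object store. The following is a minimal Go sketch of the pattern the new version relies on, assembled only from names visible in this diff (v1alpha protos, helpers.EventuallyListOutputFiles, helpers.DownloadParquet); it is illustrative, not a drop-in test:

// Build the object-store destination used in place of Redis.
destination := &v1alpha.Destination{
  Destination: &v1alpha.Destination_ObjectStore{
    ObjectStore: &v1alpha.ObjectStoreDestination{
      FileType:        v1alpha.FileType_FILE_TYPE_PARQUET,
      OutputPrefixUri: "file:///data/output/graceful_shutdown_mat", // outputURI in the test
    },
  },
}
_ = destination // passed as the Destination of the CreateMaterialization request

// After creating the materialization, poll until the first output
// version ("/0") holds a Parquet file with the expected row count.
Eventually(func(g Gomega) {
  filePaths := helpers.EventuallyListOutputFiles("graceful_shutdown_mat/0", g)
  results := helpers.DownloadParquet(filePaths[0])
  g.Expect(len(results)).Should(Equal(50000))
}, "30s", "1s").Should(Succeed())

Writing to a local file:// prefix keeps the assertions independent of any external service, which is presumably why the redis and redis-ai labels disappear from this suite.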
diff --git a/tests/integration/api/mat_pulsar_to_obj_store_test.go b/tests/integration/api/mat_pulsar_to_obj_store_test.go
index db33754aa..b77755109 100644
--- a/tests/integration/api/mat_pulsar_to_obj_store_test.go
+++ b/tests/integration/api/mat_pulsar_to_obj_store_test.go
@@ -174,7 +174,7 @@ var _ = Describe("Materialization from Pulsar to ObjectStore", Ordered, Label("p
         results := helpers.GetCSV(outputPath + firstFileName)
         g.Expect(results).Should(HaveLen(2)) //header row + 1 data row
         g.Expect(results[0]).Should(ContainElements("_time", "_subsort", "_key_hash", "last_id", "last_time", "count"))
-        g.Expect(results[1]).Should(ContainElements("2023-06-20T23:30:01.000000000", "0", "2122274938272070218", "9", "9", "1687303801000000000", "1"))
+        g.Expect(results[1]).Should(ContainElements("2023-06-20T23:30:01.000000000", "0", "15653042715643359010", "9", "9", "1687303801000000000", "1"))
       }, "10s", "1s").Should(Succeed())
     })
   })
@@ -197,15 +197,12 @@ var _ = Describe("Materialization from Pulsar to ObjectStore", Ordered, Label("p
       g.Expect(dirs).Should(HaveLen(1))

       for _, dir := range dirs {
-        if dir.Name() == firstFileName {
-          continue
-        }
         results := helpers.GetCSV(outputPath + dir.Name())
         g.Expect(results).Should(HaveLen(4)) //header row + 3 data row
         g.Expect(results[0]).Should(ContainElements("_time", "_subsort", "_key_hash", "last_id", "last_time", "count"))
-        g.Expect(results[1]).Should(ContainElements("2023-06-20T23:30:01.000000000", "0", "2122274938272070218", "9", "9", "1687303801000000000", "1"))
-        g.Expect(results[1]).Should(ContainElements("2023-06-20T23:30:03.000000000", "1", "1575016611515860288", "2", "2", "1687303803000000000", "1"))
-        g.Expect(results[1]).Should(ContainElements("2023-06-20T23:30:05.000000000", "2", "11820145550582457114", "4", "4", "1687303805000000000", "1"))
+        g.Expect(results[1]).Should(ContainElements("2023-06-20T23:30:01.000000000", "0", "15653042715643359010", "9", "9", "1687303801000000000", "1"))
+        g.Expect(results[2]).Should(ContainElements("2023-06-20T23:30:03.000000000", "1", "2694864431690786590", "2", "2", "1687303803000000000", "1"))
+        g.Expect(results[3]).Should(ContainElements("2023-06-20T23:30:05.000000000", "2", "17062639839782733832", "4", "4", "1687303805000000000", "1"))
       }
     }, "10s", "1s").Should(Succeed())
   })
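Aside: besides regenerating the expected _key_hash values (the diff only shows that the engine now emits different hashes, not why), the hunk above also fixes an assertion bug — the old test checked results[1] three times, so rows 2 and 3 were never actually verified. A sketch of the corrected, row-indexed pattern, with values exactly as they appear in the diff:

// Each expected CSV row is asserted against its own index.
g.Expect(results[1]).Should(ContainElements("2023-06-20T23:30:01.000000000", "0", "15653042715643359010", "9", "9", "1687303801000000000", "1"))
g.Expect(results[2]).Should(ContainElements("2023-06-20T23:30:03.000000000", "1", "2694864431690786590", "2", "2", "1687303803000000000", "1"))
g.Expect(results[3]).Should(ContainElements("2023-06-20T23:30:05.000000000", "2", "17062639839782733832", "4", "4", "1687303805000000000", "1"))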
"github.com/kaskada-ai/kaskada/tests/integration/shared/matchers" ) -var _ = Describe("Materializations", Ordered, Label("redis"), Label("redis-ai"), func() { +var _ = Describe("Materializations", Ordered, func() { var ctx context.Context var cancel context.CancelFunc var conn *grpc.ClientConn diff --git a/tests/integration/api/queries_v1_test.go b/tests/integration/api/queries_v1_test.go index 61cecf372..32f1fd739 100644 --- a/tests/integration/api/queries_v1_test.go +++ b/tests/integration/api/queries_v1_test.go @@ -237,7 +237,7 @@ var _ = Describe("Queries V1", Ordered, Label("pulsar"), func() { Expect(stream).ShouldNot(BeNil()) queryResponses, err := helpers.GetCreateQueryResponses(stream) Expect(err).Should(HaveOccurredGrpc()) - Expect(queryResponses).Should(BeEmpty()) + Expect(queryResponses).Should(HaveLen(1)) //inspect error response errStatus, ok := status.FromError(err) diff --git a/tests/integration/api/queries_v2_test.go b/tests/integration/api/queries_v2_test.go index 13d59b046..40be09df9 100644 --- a/tests/integration/api/queries_v2_test.go +++ b/tests/integration/api/queries_v2_test.go @@ -112,7 +112,6 @@ var _ = Describe("Queries V2", Ordered, func() { Expect(config.ExperimentalFeatures).Should(BeNil()) Expect(config.Limits).Should(BeNil()) Expect(config.Destination.GetObjectStore()).ShouldNot(BeNil()) - Expect(config.Destination.GetRedis()).Should(BeNil()) Expect(config.ResultBehavior.GetAllResults()).ShouldNot(BeNil()) Expect(config.ResultBehavior.GetFinalResults()).Should(BeNil()) Expect(config.Slice).Should(BeNil()) diff --git a/tests/integration/api/query_v1_panic_test.go b/tests/integration/api/query_v1_panic_test.go index bb49c51ce..2da1b9e7f 100644 --- a/tests/integration/api/query_v1_panic_test.go +++ b/tests/integration/api/query_v1_panic_test.go @@ -12,7 +12,6 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" - "google.golang.org/protobuf/types/known/wrapperspb" "github.com/jt-nti/gproto" v1alpha "github.com/kaskada-ai/kaskada/gen/proto/go/kaskada/kaskada/v1alpha" @@ -20,7 +19,7 @@ import ( . 
"github.com/kaskada-ai/kaskada/tests/integration/shared/matchers" ) -var _ = Describe("Query V1 when Sparrow panics", Ordered, Label("sparrow-panic"), func() { +var _ = Describe("Query V1 when Sparrow panics", Ordered, func() { var ctx context.Context var cancel context.CancelFunc var conn *grpc.ClientConn @@ -30,10 +29,6 @@ var _ = Describe("Query V1 when Sparrow panics", Ordered, Label("sparrow-panic") var tableName string BeforeAll(func() { - if strings.Contains(os.Getenv("ENV"), "local") { - Skip("tests running locally, skipping sparrow panic test") - } - //get connection to wren ctx, cancel, conn = grpcConfig.GetContextCancelConnection(30) ctx = metadata.AppendToOutgoingContext(ctx, "client-id", *integrationClientID) @@ -49,9 +44,6 @@ var _ = Describe("Query V1 when Sparrow panics", Ordered, Label("sparrow-panic") TableName: tableName, TimeColumnName: "purchase_time", EntityKeyColumnName: "customer_id", - SubsortColumnName: &wrapperspb.StringValue{ - Value: "subsort_id", - }, } _, err := tableClient.CreateTable(ctx, &v1alpha.CreateTableRequest{Table: table}) Expect(err).ShouldNot(HaveOccurredGrpc()) @@ -75,78 +67,132 @@ var _ = Describe("Query V1 when Sparrow panics", Ordered, Label("sparrow-panic") conn.Close() }) - It("should be reported in a timely manner", func() { - createQueryRequest := &v1alpha.CreateQueryRequest{ - Query: &v1alpha.Query{ - Expression: "__INTERNAL_COMPILE_PANIC__", - Destination: destination, - ResultBehavior: v1alpha.Query_RESULT_BEHAVIOR_ALL_RESULTS, - }, - QueryOptions: &v1alpha.QueryOptions{ - PresignResults: true, - }, - } - - stream, err := queryClient.CreateQuery(ctx, createQueryRequest) - Expect(err).ShouldNot(HaveOccurredGrpc()) - Expect(stream).ShouldNot(BeNil()) - - res, err := helpers.GetMergedCreateQueryResponse(stream) - Expect(err).Should(HaveOccurred()) - Expect(res).Should(BeNil()) - - //inspect error response - errStatus, ok := status.FromError(err) - Expect(ok).Should(BeTrue()) - Expect(errStatus.Code()).Should(Equal(codes.Internal)) - Expect(errStatus.Message()).Should(ContainSubstring("internal error")) - }) - - It("should support queries after ", func() { - createQueryRequest := &v1alpha.CreateQueryRequest{ - Query: &v1alpha.Query{ - Expression: ` + Describe("sparrow panic", Ordered, Label("sparrow-panic"), func() { + It("should be reported in a timely manner", func() { + if strings.Contains(os.Getenv("ENV"), "local") { + Skip("tests running locally, skipping sparrow panic test") + } + + createQueryRequest := &v1alpha.CreateQueryRequest{ + Query: &v1alpha.Query{ + Expression: "__INTERNAL_COMPILE_PANIC__", + Destination: destination, + ResultBehavior: v1alpha.Query_RESULT_BEHAVIOR_ALL_RESULTS, + }, + QueryOptions: &v1alpha.QueryOptions{ + PresignResults: true, + }, + } + + stream, err := queryClient.CreateQuery(ctx, createQueryRequest) + Expect(err).ShouldNot(HaveOccurredGrpc()) + Expect(stream).ShouldNot(BeNil()) + + res, err := helpers.GetMergedCreateQueryResponse(stream) + Expect(err).Should(HaveOccurred()) + Expect(res).Should(BeNil()) + + //inspect error response + errStatus, ok := status.FromError(err) + Expect(ok).Should(BeTrue()) + Expect(errStatus.Code()).Should(Equal(codes.Internal)) + Expect(errStatus.Message()).Should(ContainSubstring("internal error")) + }) + + It("should support queries after ", func() { + if strings.Contains(os.Getenv("ENV"), "local") { + Skip("tests running locally, skipping sparrow panic test") + } + + createQueryRequest := &v1alpha.CreateQueryRequest{ + Query: &v1alpha.Query{ + Expression: ` { time: 
query_v1_panic.purchase_time, entity: query_v1_panic.customer_id, max_amount: query_v1_panic.amount | max(), min_amount: query_v1_panic.amount | min(), }`, - Destination: destination, - ResultBehavior: v1alpha.Query_RESULT_BEHAVIOR_ALL_RESULTS, - }, - QueryOptions: &v1alpha.QueryOptions{ - PresignResults: true, - }, - } + Destination: destination, + ResultBehavior: v1alpha.Query_RESULT_BEHAVIOR_ALL_RESULTS, + }, + QueryOptions: &v1alpha.QueryOptions{ + PresignResults: true, + }, + } + + stream, err := queryClient.CreateQuery(ctx, createQueryRequest) + Expect(err).ShouldNot(HaveOccurredGrpc()) + Expect(stream).ShouldNot(BeNil()) + + res, err := helpers.GetMergedCreateQueryResponse(stream) + Expect(err).ShouldNot(HaveOccurred()) + + VerifyRequestDetails(res.RequestDetails) + Expect(res.GetDestination().GetObjectStore().GetOutputPaths().GetPaths()).ShouldNot(BeNil()) + Expect(res.GetDestination().GetObjectStore().GetOutputPaths().Paths).Should(HaveLen(1)) + + Expect(res.Analysis.Schema).Should(ContainElements( + gproto.Equal(primitiveSchemaField("time", v1alpha.DataType_PRIMITIVE_TYPE_TIMESTAMP_NANOSECOND)), + gproto.Equal(primitiveSchemaField("entity", v1alpha.DataType_PRIMITIVE_TYPE_STRING)), + gproto.Equal(primitiveSchemaField("max_amount", v1alpha.DataType_PRIMITIVE_TYPE_I64)), + gproto.Equal(primitiveSchemaField("min_amount", v1alpha.DataType_PRIMITIVE_TYPE_I64)), + )) + + resultsUrl := res.GetDestination().GetObjectStore().GetOutputPaths().Paths[0] + firstResults := helpers.DownloadParquet(resultsUrl) + + Expect(firstResults).Should(HaveLen(10)) + Expect(firstResults[9]).Should(MatchFields(IgnoreExtras, Fields{ + "Time": PointTo(BeEquivalentTo(1578182400000000000)), + "Entity": PointTo(Equal("patrick")), + "Max_amount": PointTo(BeEquivalentTo(5000)), + "Min_amount": PointTo(BeEquivalentTo(3)), + })) + }) + }) + + Describe("wren panic", Label("wren-panic"), func() { + It("doesn't panic on a malformed query (missing comma in record)", func() { + createQueryRequest := &v1alpha.CreateQueryRequest{ + Query: &v1alpha.Query{ + Expression: ` +{ +time: query_v1_panic.purchase_time +entity: query_v1_panic.customer_id +}`, + Destination: destination, + ResultBehavior: v1alpha.Query_RESULT_BEHAVIOR_ALL_RESULTS, + }, + QueryOptions: &v1alpha.QueryOptions{ + PresignResults: true, + }, + } + + stream, err := queryClient.CreateQuery(ctx, createQueryRequest) + Expect(err).ShouldNot(HaveOccurredGrpc()) + Expect(stream).ShouldNot(BeNil()) + + res, err := helpers.GetMergedCreateQueryResponse(stream) + Expect(err).ShouldNot(HaveOccurred()) + Expect(res).ShouldNot(BeNil()) + + Expect(res).ShouldNot(BeNil()) + Expect(res.RequestDetails.RequestId).ShouldNot(BeEmpty()) + Expect(res.GetDestination().GetObjectStore().GetOutputPaths().GetPaths()).Should(BeNil()) + + Expect(res.State).Should(Equal(v1alpha.CreateQueryResponse_STATE_FAILURE)) + + Expect(res.Analysis.CanExecute).Should(BeFalse()) + + Expect(res.FenlDiagnostics).ShouldNot(BeNil()) + Expect(res.FenlDiagnostics.NumErrors).Should(BeEquivalentTo(1)) + + diagnostics := res.FenlDiagnostics.FenlDiagnostics + Expect(diagnostics).Should(HaveLen(1)) + Expect(diagnostics).Should(ContainElement(ContainSubstring("Invalid syntax"))) + Expect(diagnostics).Should(ContainElement(ContainSubstring("Invalid token 'entity'"))) + }) - stream, err := queryClient.CreateQuery(ctx, createQueryRequest) - Expect(err).ShouldNot(HaveOccurredGrpc()) - Expect(stream).ShouldNot(BeNil()) - - res, err := helpers.GetMergedCreateQueryResponse(stream) - 
Expect(err).ShouldNot(HaveOccurred()) - - VerifyRequestDetails(res.RequestDetails) - Expect(res.GetDestination().GetObjectStore().GetOutputPaths().GetPaths()).ShouldNot(BeNil()) - Expect(res.GetDestination().GetObjectStore().GetOutputPaths().Paths).Should(HaveLen(1)) - - Expect(res.Analysis.Schema).Should(ContainElements( - gproto.Equal(primitiveSchemaField("time", v1alpha.DataType_PRIMITIVE_TYPE_TIMESTAMP_NANOSECOND)), - gproto.Equal(primitiveSchemaField("entity", v1alpha.DataType_PRIMITIVE_TYPE_STRING)), - gproto.Equal(primitiveSchemaField("max_amount", v1alpha.DataType_PRIMITIVE_TYPE_I64)), - gproto.Equal(primitiveSchemaField("min_amount", v1alpha.DataType_PRIMITIVE_TYPE_I64)), - )) - - resultsUrl := res.GetDestination().GetObjectStore().GetOutputPaths().Paths[0] - firstResults := helpers.DownloadParquet(resultsUrl) - - Expect(firstResults).Should(HaveLen(10)) - Expect(firstResults[9]).Should(MatchFields(IgnoreExtras, Fields{ - "Time": PointTo(BeEquivalentTo(1578182400000000000)), - "Entity": PointTo(Equal("patrick")), - "Max_amount": PointTo(BeEquivalentTo(5000)), - "Min_amount": PointTo(BeEquivalentTo(3)), - })) }) }) diff --git a/tests/integration/api/query_v1_rest_test.go b/tests/integration/api/query_v1_rest_test.go index ae863b299..3787ab5ce 100644 --- a/tests/integration/api/query_v1_rest_test.go +++ b/tests/integration/api/query_v1_rest_test.go @@ -61,7 +61,7 @@ var _ = PDescribe("Query V1 REST", Ordered, func() { loadReq := loadRequestJson{ TableName: table.TableName, - ParquetFileUri: helpers.GetFileURI("purchases_part1.parquet"), + ParquetFileUri: helpers.GetTestFileURI("purchases_part1.parquet"), } jsonBody, err = json.Marshal(loadReq) diff --git a/tests/integration/api/query_v1_slicing_test.go b/tests/integration/api/query_v1_slicing_test.go index bfeaf425b..fc1be5bbb 100644 --- a/tests/integration/api/query_v1_slicing_test.go +++ b/tests/integration/api/query_v1_slicing_test.go @@ -151,7 +151,7 @@ max_spent_in_single_transaction: max(transactions_slicing.price * transactions_s resultsUrl := res.GetDestination().GetObjectStore().GetOutputPaths().Paths[0] results := helpers.DownloadParquet(resultsUrl) - helpers.LogLn(fmt.Sprintf("Result set size, with 100%% slice: %d", len(results))) + helpers.LogLn(fmt.Sprintf("Result set size, with 100 percent slice: %d", len(results))) Expect(len(results)).Should(Equal(rowCount)) }) @@ -197,7 +197,7 @@ max_spent_in_single_transaction: max(transactions_slicing.price * transactions_s resultsUrl := res.GetDestination().GetObjectStore().GetOutputPaths().Paths[0] results := helpers.DownloadParquet(resultsUrl) - helpers.LogLn(fmt.Sprintf("Result set size, with 10%% slice: %d", len(results))) + helpers.LogLn(fmt.Sprintf("Result set size, with 10 percent slice: %d", len(results))) /* * There are 150 unique entities in this dataset. 
@@ -212,8 +212,8 @@ max_spent_in_single_transaction: max(transactions_slicing.price * transactions_s
 		})
 	})
 
-	Describe("Run the query with a 0.3% slice", func() {
-		It("should return about 0.3% of the results", func() {
+	Describe("Run the query with a 1% slice", func() {
+		It("should return about 1% of the results", func() {
 			destination := &v1alpha.Destination{}
 			destination.Destination = &v1alpha.Destination_ObjectStore{
 				ObjectStore: &v1alpha.ObjectStoreDestination{
@@ -228,7 +228,7 @@ max_spent_in_single_transaction: max(transactions_slicing.price * transactions_s
 			Slice: &v1alpha.SliceRequest{
 				Slice: &v1alpha.SliceRequest_Percent{
 					Percent: &v1alpha.SliceRequest_PercentSlice{
-						Percent: 0.3,
+						Percent: 1,
 					},
 				},
 			},
@@ -252,18 +252,17 @@ max_spent_in_single_transaction: max(transactions_slicing.price * transactions_s
 			resultsUrl := res.GetDestination().GetObjectStore().GetOutputPaths().Paths[0]
 			results := helpers.DownloadParquet(resultsUrl)
 
-			helpers.LogLn(fmt.Sprintf("Result set size, with 0.3%% slice: %d", len(results)))
+			helpers.LogLn(fmt.Sprintf("Result set size, with 1 percent slice: %d", len(results)))
 
 			/*
 			 * There are 150 unique entities in this dataset.
 			 * Each entity has an average of 333.3 events.
 			 * The total dataset size is 50,000 (150 * 333.3 = 49,995)
-			 * Assuming uniform distribution (not entirely true), then 0.03% of the entities = ~1 entites (0.45 entities)
-			 * Since the 0.3% slice is based on a hashing function, there is some room for error.
-			 * Random Heuristic: Lower Bound -> 0% (0 entities) and Upper Bound -> 1% (1.5 entities)
-			 * Lower Bound: 0 (0 * 333.3) and Upper Bound: 499.95 (1.5 * 333.3)
+			 * Assuming uniform distribution (not entirely true), then 1% of the entities = 1.5 entities
+			 * Since the 1% slice is based on a hashing function, there is some room for error. 
+ * Lower Bound: 333 (1 * 333.3) and Upper Bound: 666 (2 * 333.3) */ - Expect(len(results)).Should(BeNumerically("~", 250, 125)) + Expect(len(results)).Should(BeNumerically("~", 500, 200)) }) }) diff --git a/tests/integration/api/query_v1_test.go b/tests/integration/api/query_v1_test.go index e90e3ab22..0ee7177ea 100644 --- a/tests/integration/api/query_v1_test.go +++ b/tests/integration/api/query_v1_test.go @@ -193,7 +193,7 @@ min_amount: query_v1_test.amount | min(), SourceData: &v1alpha.LoadDataRequest_FileInput{ FileInput: &v1alpha.FileInput{ FileType: v1alpha.FileType_FILE_TYPE_PARQUET, - Uri: helpers.GetFileURI("purchases/purchases_part2.parquet"), + Uri: helpers.GetTestFileURI("purchases/purchases_part2.parquet"), }, }, CopyToFilesystem: true, diff --git a/tests/integration/api/tables_test.go b/tests/integration/api/tables_test.go index 667999c6b..b9cf4013e 100644 --- a/tests/integration/api/tables_test.go +++ b/tests/integration/api/tables_test.go @@ -236,7 +236,7 @@ var _ = Describe("Tables", Ordered, func() { SourceData: &v1alpha.LoadDataRequest_FileInput{ FileInput: &v1alpha.FileInput{ FileType: v1alpha.FileType_FILE_TYPE_PARQUET, - Uri: helpers.GetFileURI(fileName), + Uri: helpers.GetTestFileURI(fileName), }, }, }) @@ -298,7 +298,7 @@ var _ = Describe("Tables", Ordered, func() { SourceData: &v1alpha.LoadDataRequest_FileInput{ FileInput: &v1alpha.FileInput{ FileType: v1alpha.FileType_FILE_TYPE_PARQUET, - Uri: helpers.GetFileURI(fileName), + Uri: helpers.GetTestFileURI(fileName), }, }, }) @@ -320,7 +320,7 @@ var _ = Describe("Tables", Ordered, func() { SourceData: &v1alpha.LoadDataRequest_FileInput{ FileInput: &v1alpha.FileInput{ FileType: v1alpha.FileType_FILE_TYPE_PARQUET, - Uri: helpers.GetFileURI(fileName), + Uri: helpers.GetTestFileURI(fileName), }, }, }) @@ -366,7 +366,7 @@ var _ = Describe("Tables", Ordered, func() { SourceData: &v1alpha.LoadDataRequest_FileInput{ FileInput: &v1alpha.FileInput{ FileType: v1alpha.FileType_FILE_TYPE_PARQUET, - Uri: helpers.GetFileURI(fileName), + Uri: helpers.GetTestFileURI(fileName), }, }, CopyToFilesystem: true, @@ -387,7 +387,7 @@ var _ = Describe("Tables", Ordered, func() { SourceData: &v1alpha.LoadDataRequest_FileInput{ FileInput: &v1alpha.FileInput{ FileType: v1alpha.FileType_FILE_TYPE_PARQUET, - Uri: helpers.GetFileURI(fileName), + Uri: helpers.GetTestFileURI(fileName), }, }, CopyToFilesystem: true, @@ -538,7 +538,7 @@ var _ = Describe("Tables", Ordered, func() { SourceData: &v1alpha.LoadDataRequest_FileInput{ FileInput: &v1alpha.FileInput{ FileType: v1alpha.FileType_FILE_TYPE_PARQUET, - Uri: helpers.GetFileURI(fileName), + Uri: helpers.GetTestFileURI(fileName), }, }, }) diff --git a/tests/integration/cli/cli_suite_test.go b/tests/integration/cli/cli_suite_test.go index d1bfd3f1e..2b1d49f5b 100644 --- a/tests/integration/cli/cli_suite_test.go +++ b/tests/integration/cli/cli_suite_test.go @@ -40,7 +40,7 @@ var ( grpcConfig helpers.HostConfig ) -// Before starting tests, delete all tables associated with the Integration clientID. Also completely wipes connected RedisAI instance. +// Before starting tests, delete all tables associated with the Integration clientID. 
var _ = BeforeSuite(func() { flag.Parse() diff --git a/tests/integration/docker-compose.yml b/tests/integration/docker-compose.yml index 23e2941fc..f019de962 100644 --- a/tests/integration/docker-compose.yml +++ b/tests/integration/docker-compose.yml @@ -4,7 +4,7 @@ networks: integration: services: - + pulsar: image: apachepulsar/pulsar:3.0.0 container_name: pulsar @@ -24,7 +24,7 @@ services: retries: 5 start_period: 20s restart: unless-stopped - volumes: + volumes: - pulsardata:/pulsar/data - pulsarconf:/pulsar/conf @@ -35,7 +35,7 @@ services: container_name: kaskada depends_on: - pulsar - environment: + environment: # for sparrow SPARROW_LOG_FILTER: "egg::=warn,sparrow_=info,info" # for wren @@ -47,7 +47,7 @@ services: TMPDIR: "/data/tmp" logging: driver: "json-file" - options: + options: tag: "{{.ImageName}}|{{.Name}}|{{.ImageFullID}}|{{.FullID}}" networks: - integration @@ -59,7 +59,7 @@ services: - ../../testdata:/testdata restart: unless-stopped -volumes: +volumes: pulsardata: pulsarconf: diff --git a/tests/integration/shared/go.mod b/tests/integration/shared/go.mod index bccf971de..00b630d20 100644 --- a/tests/integration/shared/go.mod +++ b/tests/integration/shared/go.mod @@ -72,7 +72,7 @@ require ( github.com/shirou/gopsutil/v3 v3.22.9 // indirect github.com/sirupsen/logrus v1.9.0 // indirect github.com/stretchr/objx v0.5.0 // indirect - github.com/stretchr/testify v1.8.1 // indirect + github.com/stretchr/testify v1.8.4 // indirect github.com/tinylib/msgp v1.1.6 // indirect github.com/tklauser/go-sysconf v0.3.10 // indirect github.com/tklauser/numcpus v0.5.0 // indirect diff --git a/tests/integration/shared/go.sum b/tests/integration/shared/go.sum index 9b9c49b47..95108dac4 100644 --- a/tests/integration/shared/go.sum +++ b/tests/integration/shared/go.sum @@ -564,8 +564,9 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.5/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/tinylib/msgp v1.1.6 h1:i+SbKraHhnrf9M5MYmvQhFnbLhAXSDWF8WWsuyRdocw= github.com/tinylib/msgp v1.1.6/go.mod h1:75BAfg2hauQhs3qedfdDZmWAPcFMAvJE5b9rGOMufyw= github.com/tklauser/go-sysconf v0.3.10 h1:IJ1AZGZRWbY8T5Vfk04D9WOA5WSejdflXxP03OUqALw= diff --git a/tests/integration/shared/helpers/helpers.go b/tests/integration/shared/helpers/helpers.go index 5c2e4e9e5..31aad3c01 100644 --- a/tests/integration/shared/helpers/helpers.go +++ b/tests/integration/shared/helpers/helpers.go @@ -26,6 +26,7 @@ import ( . "github.com/kaskada-ai/kaskada/tests/integration/shared/matchers" . "github.com/onsi/ginkgo/v2" . 
"github.com/onsi/gomega" + "github.com/onsi/gomega/types" "github.com/xitongsys/parquet-go-source/local" "github.com/xitongsys/parquet-go/reader" "google.golang.org/grpc" @@ -35,6 +36,11 @@ import ( v1alpha "github.com/kaskada-ai/kaskada/gen/proto/go/kaskada/kaskada/v1alpha" ) +const ( + localOutputPath string = "../data/output" + localTestdataPath string = "../../../testdata" +) + // HostConfig holds the data needed to connect to a particular grpc server type HostConfig struct { Hostname string @@ -104,6 +110,14 @@ func DownloadParquet(url string) []interface{} { return rows } +func TestsAreRunningLocally() bool { + return os.Getenv("ENV") == "local-local" +} + +func TestsAreRunningLocallyInDocker() bool { + return os.Getenv("ENV") == "local-docker" +} + func downloadFile(url string) (localPath string, cleanup func()) { if strings.HasPrefix(url, "http://") { // download to temp file @@ -125,17 +139,79 @@ func downloadFile(url string) (localPath string, cleanup func()) { } localPath = strings.TrimPrefix(url, "file://") cleanup = func() {} - if os.Getenv("ENV") != "local-local" { - localPath = fmt.Sprintf("../%s", localPath) + if TestsAreRunningLocallyInDocker() && !strings.HasPrefix(localPath, "..") { + localPath = fmt.Sprintf("..%s", localPath) } return } -func GetFileURI(fileName string) string { - if os.Getenv("ENV") == "local-local" { + + +// Gets an output path URI +func GetOutputPathURI(subPath string) string { + subPath = vfs_utils.EnsureTrailingSlash(subPath) + if TestsAreRunningLocally() { workDir, err := os.Getwd() Expect(err).ShouldNot(HaveOccurred()) - path := filepath.Join(workDir, "../../../testdata", fileName) + path := filepath.Join(workDir, localOutputPath, subPath) + return fmt.Sprintf("file://%s", path) + } + return fmt.Sprintf("file:///data/output/%s", subPath) +} + +// Deletes all files at the output path +func EmptyOutputPath(subPath string) { + subPath = vfs_utils.EnsureTrailingSlash(subPath) + workDir, err := os.Getwd() + Expect(err).ShouldNot(HaveOccurred()) + path := filepath.Join(workDir, localOutputPath, subPath) + err = os.RemoveAll(path) + Expect(err).ShouldNot(HaveOccurred()) +} + +// Lists all files at the output path +func ListOutputFiles(subPath string) []string { + subPath = vfs_utils.EnsureTrailingSlash(subPath) + workDir, err := os.Getwd() + Expect(err).ShouldNot(HaveOccurred()) + path := filepath.Join(workDir, localOutputPath, subPath) + dirEntries, err := os.ReadDir(path) + Expect(err).ShouldNot(HaveOccurred()) + + paths := []string{} + for _, dirEntry := range dirEntries { + if !dirEntry.IsDir() { + paths = append(paths, filepath.Join(path, dirEntry.Name())) + } + } + return paths +} + +// Lists all files at the output path +func EventuallyListOutputFiles(subPath string, g types.Gomega) []string { + subPath = vfs_utils.EnsureTrailingSlash(subPath) + workDir, err := os.Getwd() + g.Expect(err).ShouldNot(HaveOccurred()) + path := filepath.Join(workDir, localOutputPath, subPath) + dirEntries, err := os.ReadDir(path) + g.Expect(err).ShouldNot(HaveOccurred()) + + paths := []string{} + for _, dirEntry := range dirEntries { + if !dirEntry.IsDir() { + paths = append(paths, filepath.Join(localOutputPath, subPath, dirEntry.Name())) + } + } + g.Expect(len(paths)).Should(BeNumerically(">", 0)) + return paths +} + +// Gets a file URI for file in the testdata path +func GetTestFileURI(fileName string) string { + if TestsAreRunningLocally() { + workDir, err := os.Getwd() + Expect(err).ShouldNot(HaveOccurred()) + path := filepath.Join(workDir, localTestdataPath, 
fileName) return fmt.Sprintf("file://%s", path) } return fmt.Sprintf("file:///testdata/%s", fileName) @@ -143,7 +219,7 @@ func GetFileURI(fileName string) string { // Reads a file from the testdata path func ReadTestFile(fileName string) []byte { - filePath := fmt.Sprintf("../../../testdata/%s", fileName) + filePath := fmt.Sprintf("%s/%s", localTestdataPath, fileName) fileData, err := os.ReadFile(filePath) Expect(err).ShouldNot(HaveOccurred(), fmt.Sprintf("issue reading testdata file: %s", fileName)) return fileData @@ -151,14 +227,14 @@ func ReadTestFile(fileName string) []byte { // Writes a file to the testdata path func WriteTestFile(fileName string, data []byte) { - filePath := fmt.Sprintf("../../../testdata/%s", fileName) + filePath := fmt.Sprintf("%s/%s", localTestdataPath, fileName) err := os.WriteFile(filePath, data, 0666) Expect(err).ShouldNot(HaveOccurred(), fmt.Sprintf("issue writing testdata file: %s", fileName)) } // Deletes a file from the testdata path func DeleteTestFile(fileName string) { - filePath := fmt.Sprintf("../../../testdata/%s", fileName) + filePath := fmt.Sprintf("%s/%s", localTestdataPath, fileName) if fileExists(filePath) { err := os.Remove(filePath) Expect(err).ShouldNot(HaveOccurred(), fmt.Sprintf("issue deleting testdata file: %s", fileName)) @@ -194,7 +270,7 @@ func LoadTestFilesIntoTable(ctx context.Context, conn *grpc.ClientConn, table *v SourceData: &v1alpha.LoadDataRequest_FileInput{ FileInput: &v1alpha.FileInput{ FileType: fileType, - Uri: GetFileURI(fileName), + Uri: GetTestFileURI(fileName), }, }, } @@ -364,11 +440,6 @@ func GetMergedCreateQueryResponse(stream v1alpha.QueryService_CreateQueryClient) if queryResponse.RequestDetails != nil { mergedResponse.RequestDetails = queryResponse.RequestDetails } - if queryResponse.GetDestination().GetRedis() != nil { - mergedResponse.Destination = &v1alpha.Destination{ - Destination: &v1alpha.Destination_Redis{Redis: queryResponse.GetDestination().GetRedis()}, - } - } if queryResponse.GetDestination().GetObjectStore().GetOutputPaths() != nil { newPaths := queryResponse.GetDestination().GetObjectStore().GetOutputPaths().Paths existingPaths := []string{} diff --git a/wren/.version b/wren/.version index 899f24fc7..142464bf2 100644 --- a/wren/.version +++ b/wren/.version @@ -1 +1 @@ -0.9.0 \ No newline at end of file +0.11.0 \ No newline at end of file diff --git a/wren/client/object_store_client.go b/wren/client/object_store_client.go index befa3733d..9398b2873 100644 --- a/wren/client/object_store_client.go +++ b/wren/client/object_store_client.go @@ -242,7 +242,7 @@ func (c objectStoreClient) GetPresignedDownloadURL(ctx context.Context, URI stri switch c.objectStoreType { case object_store_type_local: - presignedURL = file.Path() + presignedURL = fmt.Sprintf("file://%s", file.Path()) return case object_store_type_s3: diff --git a/wren/compute/compute_manager.go b/wren/compute/compute_manager.go index c632ff79c..b7e45ee5b 100644 --- a/wren/compute/compute_manager.go +++ b/wren/compute/compute_manager.go @@ -284,9 +284,6 @@ func (m *computeManager) processMaterializations(requestCtx context.Context, own case *v1alpha.Destination_Pulsar: matLogger.Info().Interface("type", kind).Str("when", "pre-compute").Msg("materializating to pulsar") destination.Destination = kind - case *v1alpha.Destination_Redis: - matLogger.Info().Interface("type", kind).Str("when", "pre-compute").Msg("materializing to redis") - destination.Destination = kind default: matLogger.Error().Interface("type", kind).Str("when", 
"pre-compute").Msg("materialization output type not implemented") return fmt.Errorf("materialization output type %s is not implemented", kind) diff --git a/wren/property/query_result_type.go b/wren/property/query_result_type.go index 526965349..60e735ece 100644 --- a/wren/property/query_result_type.go +++ b/wren/property/query_result_type.go @@ -5,7 +5,6 @@ type QueryResultType string const ( QueryResultTypeUnspecified QueryResultType = "UNSPECIFIED" QueryResultTypeParquet QueryResultType = "PARQUET" - QueryResultTypeRedisBulk QueryResultType = "REDIS_BULK" ) // Values provides list valid values for Enum. @@ -13,7 +12,6 @@ func (QueryResultType) Values() (kinds []string) { resulttypes := []QueryResultType{ QueryResultTypeUnspecified, QueryResultTypeParquet, - QueryResultTypeRedisBulk, } for _, s := range resulttypes { diff --git a/wren/service/query_v1.go b/wren/service/query_v1.go index 29f738288..b8e9d8860 100644 --- a/wren/service/query_v1.go +++ b/wren/service/query_v1.go @@ -93,23 +93,6 @@ func (q *queryV1Service) CreateQuery(request *v1alpha.CreateQueryRequest, respon return wrapErrorWithStatus(err, subLogger) } - tableMap, err := q.kaskadaTableClient.GetKaskadaTablesFromNames(ctx, owner, compileResponse.FreeNames) - if err != nil { - return wrapErrorWithStatus(err, subLogger) - } - - for _, table := range tableMap { - switch table.Source.Source.(type) { - case *v1alpha.Source_Kaskada: - case *v1alpha.Source_Pulsar: - err := customerrors.NewInvalidArgumentErrorWithCustomText("query is currently not supported on tables backed by streams") - return wrapErrorWithStatus(err, subLogger) - default: - log.Error().Msgf("unknown source type %T", table.Source.Source) - return wrapErrorWithStatus(customerrors.NewInternalError("unknown table source type"), subLogger) - } - } - // Update the request views with only the views required for the query. request.Query.Views = views @@ -160,6 +143,23 @@ func (q *queryV1Service) CreateQuery(request *v1alpha.CreateQueryRequest, respon return nil } + tableMap, err := q.kaskadaTableClient.GetKaskadaTablesFromNames(ctx, owner, compileResponse.FreeNames) + if err != nil { + return wrapErrorWithStatus(err, subLogger) + } + + for _, table := range tableMap { + switch table.Source.Source.(type) { + case *v1alpha.Source_Kaskada: + case *v1alpha.Source_Pulsar: + err := customerrors.NewInvalidArgumentErrorWithCustomText("query is currently not supported on tables backed by streams") + return wrapErrorWithStatus(err, subLogger) + default: + log.Error().Msgf("unknown source type %T", table.Source.Source) + return wrapErrorWithStatus(customerrors.NewInternalError("unknown table source type"), subLogger) + } + } + kaskadaQuery, err := q.kaskadaQueryClient.CreateKaskadaQuery(ctx, owner, &ent.KaskadaQuery{ Expression: query.Expression, DataTokenID: dataToken.ID, @@ -426,7 +426,7 @@ func (q *queryV1Service) validateOutputTo(ctx context.Context, query *v1alpha.Qu default: subLogger.Warn().Interface("kind", kind).Interface("type", kind.ObjectStore.FileType).Msg("unknown output_to file_type, defaulting to 'ObjectStore->Parquet'") } - case *v1alpha.Destination_Pulsar, *v1alpha.Destination_Redis: + case *v1alpha.Destination_Pulsar: return fmt.Errorf("query output type: %s is only valid for materializations", kind) default: subLogger.Warn().Interface("kind", kind).Msg("unknown output_to, defaulting to 'ObjectStore->Parquet'") diff --git a/wren/wren b/wren/wren deleted file mode 100755 index cf1474b60..000000000 Binary files a/wren/wren and /dev/null differ