diff --git a/.dockerignore b/.dockerignore index 94ea17ac00fc3..f7ce3cba5b9d1 100644 --- a/.dockerignore +++ b/.dockerignore @@ -17,4 +17,5 @@ packages/*/coverage/ packages/cubejs-server-core/playground/ packages/cubejs-serverless packages/cubejs-serverless-aws -packages/cubejs-serverless-google \ No newline at end of file +packages/cubejs-serverless-google +packages/cubejs-backend-native/target diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 9168e02f116cd..0000000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,68 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: '' -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Minimally reproducible Cube Schema** -In case your bug report is data modelling related please put your minimally reproducible Cube Schema here. -You can use selects without tables in order to achieve that as follows. - -```javascript -cube(`Orders`, { - sql: ` - select 1 as id, 100 as amount, 'new' status - UNION ALL - select 2 as id, 200 as amount, 'new' status - UNION ALL - select 3 as id, 300 as amount, 'processed' status - UNION ALL - select 4 as id, 500 as amount, 'processed' status - UNION ALL - select 5 as id, 600 as amount, 'shipped' status - `, - measures: { - count: { - type: `count`, - }, - totalAmount: { - sql: `amount`, - type: `sum`, - }, - toRemove: { - type: `count`, - }, - }, - dimensions: { - status: { - sql: `status`, - type: `string`, - }, - }, -}); -``` - -**Version:** -[e.g. 0.4.5] - -**Additional context** -Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index bbcbbe7d61558..0000000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: '' -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md deleted file mode 100644 index cc87f5c262fae..0000000000000 --- a/.github/ISSUE_TEMPLATE/question.md +++ /dev/null @@ -1,25 +0,0 @@ ---- -name: Question -about: Ask any question about Cube.js -title: '' -labels: 'question' -assignees: '' - ---- - -**Problem** - -A clear and concise description of what the problem is. Ex. How can I do X with Cube.js? [...] - -**Related Cube.js schema** -```javascript -cube(`Cube`, { - sql: `` //, ... -}) -``` - -**Related Cube.js generated SQL** - -```sql -SELECT * FROM ... 
-``` diff --git a/.github/ISSUE_TEMPLATE/sql_api_query_issue.md b/.github/ISSUE_TEMPLATE/sql_api_query_issue.md deleted file mode 100644 index 38d314e85bc1e..0000000000000 --- a/.github/ISSUE_TEMPLATE/sql_api_query_issue.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: SQL API Query Issue -about: Create a report to help us improve -title: '' -labels: '' -assignees: '' - ---- - -**Failed SQL** -Search for `Failed SQL` log message. - -**Logical Plan** -Search for `Can't rewrite plan` log message. - -**Version:** -[e.g. 0.4.5] - -**Additional context** -Add any other context about the problem here. diff --git a/.github/actions/deploy-example.sh b/.github/actions/deploy-example.sh deleted file mode 100755 index 700af36579aa9..0000000000000 --- a/.github/actions/deploy-example.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/sh - -# Exit on first error -set -e - -npm config set loglevel error - -# Required environment variable -EXAMPLE_SLUG=${EXAMPLE_SLUG} - -EXAMPLE_DIRECTORY=examples/${EXAMPLE_SLUG} -EXAMPLE_CUBE_SKIP=${EXAMPLE_CUBE_SKIP:-0} -EXAMPLE_FRONTEND_SKIP=${EXAMPLE_FRONTEND_SKIP:-0} -EXAMPLE_FRONTEND_SUBDIRECTORY=${EXAMPLE_FRONTEND_SUBDIRECTORY:-dashboard-app} -EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY=${EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY:-build} - -cd "$EXAMPLE_DIRECTORY" - -if [ "$EXAMPLE_CUBE_SKIP" -eq 0 ] -then - yarn install - npm install -g cubejs-cli - cubejs deploy -fi - -if [ "$EXAMPLE_FRONTEND_SKIP" -eq 0 ] -then - cd "$EXAMPLE_FRONTEND_SUBDIRECTORY" - yarn install - yarn build - npm install -g netlify-cli - netlify deploy --dir="$EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY" --prod -fi \ No newline at end of file diff --git a/.github/actions/init-ci/action.yaml b/.github/actions/init-ci/action.yaml deleted file mode 100644 index 4634c32066412..0000000000000 --- a/.github/actions/init-ci/action.yaml +++ /dev/null @@ -1,30 +0,0 @@ -name: 'Init CI' -description: 'Boostrap repo for testing' -inputs: - node: - description: 'Node.js version' -runs: - using: "composite" - steps: - - name: Use Node.js ${{ inputs.node }} - uses: actions/setup-node@v3 - with: - node-version: ${{ inputs.node }} - - name: Cache node modules - uses: actions/cache@v3 - with: - # npm cache files are stored in `~/.npm` on Linux/macOS - path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- - - name: Yarn install - uses: borales/actions-yarn@v2.3.0 - with: - cmd: install - - name: Lerna bootstrap - run: yarn lerna bootstrap --ignore @cubejs-backend/jdbc-driver - - name: Lerna tsc - run: yarn tsc diff --git a/.github/actions/integration/athena.sh b/.github/actions/integration/athena.sh deleted file mode 100755 index 97b5eb5472263..0000000000000 --- a/.github/actions/integration/athena.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -echo "::group::Athena [cloud]" -export CUBEJS_AWS_REGION=us-east-1 -export CUBEJS_AWS_S3_OUTPUT_LOCATION=s3://cubejs-opensource/testing/output -export CUBEJS_DB_EXPORT_BUCKET=s3://cubejs-opensource/testing/export/ -yarn lerna run --concurrency 1 --stream --no-prefix integration:athena -export CUBEJS_DB_EXPORT_BUCKET=cubejs-opensource -yarn lerna run --concurrency 1 --stream --no-prefix integration:athena -# yarn lerna run --concurrency 1 --stream --no-prefix smoke:athena -echo "::endgroup::" diff --git a/.github/actions/integration/bigquery.sh 
b/.github/actions/integration/bigquery.sh deleted file mode 100755 index 3da50e4862455..0000000000000 --- a/.github/actions/integration/bigquery.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -echo "::group::BigQuery [cloud]" -export CUBEJS_DB_BQ_PROJECT_ID=cube-open-source -export CUBEJS_DB_EXPORT_BUCKET=cube-open-source-export-bucket -yarn lerna run --concurrency 1 --stream --no-prefix integration:bigquery -# yarn lerna run --concurrency 1 --stream --no-prefix smoke:bigquery -echo "::endgroup::" diff --git a/.github/actions/integration/clickhouse.sh b/.github/actions/integration/clickhouse.sh deleted file mode 100755 index 9630f77e3acf1..0000000000000 --- a/.github/actions/integration/clickhouse.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_CLICKHOUSE_VERSION=23.11 - -echo "::group::Clickhouse ${TEST_CLICKHOUSE_VERSION}"; -docker pull clickhouse/clickhouse-server:${TEST_CLICKHOUSE_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:clickhouse -echo "::endgroup::" - -export TEST_CLICKHOUSE_VERSION=22.8 - -echo "::group::Clickhouse ${TEST_CLICKHOUSE_VERSION}"; -docker pull clickhouse/clickhouse-server:${TEST_CLICKHOUSE_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:clickhouse -echo "::endgroup::" - -export TEST_CLICKHOUSE_VERSION=21.8 - -echo "::group::Clickhouse ${TEST_CLICKHOUSE_VERSION}"; -docker pull clickhouse/clickhouse-server:${TEST_CLICKHOUSE_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:clickhouse -echo "::endgroup::" diff --git a/.github/actions/integration/crate.sh b/.github/actions/integration/crate.sh deleted file mode 100755 index 4597e168cd521..0000000000000 --- a/.github/actions/integration/crate.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -set -eo pipefail - -export DEBUG=testcontainers - -echo "::group::Crate" -yarn lerna run --concurrency 1 --stream --no-prefix integration:crate -echo "::endgroup::" diff --git a/.github/actions/integration/cubestore.sh b/.github/actions/integration/cubestore.sh deleted file mode 100755 index 6fb0bdb1b4c8d..0000000000000 --- a/.github/actions/integration/cubestore.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_CUBESTORE_VERSION=latest - -echo "::group::CubeStore ${TEST_CUBESTORE_VERSION}"; -docker pull cubejs/cubestore:${TEST_CUBESTORE_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:cubestore -echo "::endgroup::" diff --git a/.github/actions/integration/druid.sh b/.github/actions/integration/druid.sh deleted file mode 100755 index 11840c126f27f..0000000000000 --- a/.github/actions/integration/druid.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_POSTGRES_VERSION=13 -export TEST_ZOOKEEPER_VERSION=3.5 -export TEST_DRUID_VERSION=27.0.0 - -echo "::group::Druid ${TEST_DRUID_VERSION}"; - -docker pull postgres:${TEST_POSTGRES_VERSION} -docker pull zookeeper:${TEST_ZOOKEEPER_VERSION} -docker pull apache/druid:${TEST_DRUID_VERSION} - -echo "Druid ${TEST_DRUID_VERSION}"; -yarn lerna run --concurrency 1 --stream --no-prefix integration:druid -echo "::endgroup::" diff --git a/.github/actions/integration/elasticsearch.sh b/.github/actions/integration/elasticsearch.sh deleted file 
mode 100755 index a8e992bd1e48a..0000000000000 --- a/.github/actions/integration/elasticsearch.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_ELASTIC_OPENDISTRO_VERSION=1.13.1 - -echo "::group::ElasticSearch Open Distro ${TEST_ELASTIC_OPENDISTRO_VERSION}"; -docker pull amazon/opendistro-for-elasticsearch:${TEST_ELASTIC_OPENDISTRO_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:elastic -echo "::endgroup::" diff --git a/.github/actions/integration/mongobi.sh b/.github/actions/integration/mongobi.sh deleted file mode 100755 index e17f856dd869b..0000000000000 --- a/.github/actions/integration/mongobi.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_MONGO_TAG=6.0 -export TEST_MONGOBI_VERSION=mongodb-bi-linux-x86_64-ubuntu2004-v2.14.8 - -echo "::group::MongoBI" -yarn lerna run --concurrency 1 --stream --no-prefix integration:mongobi -echo "::endgroup::" diff --git a/.github/actions/integration/mssql.sh b/.github/actions/integration/mssql.sh deleted file mode 100755 index 1751f44645d17..0000000000000 --- a/.github/actions/integration/mssql.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_MSSQL_VERSION=2017-latest - -echo "::group::MSSQL ${TEST_MSSQL_VERSION}"; -docker pull mcr.microsoft.com/mssql/server:${TEST_MSSQL_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:mssql -echo "::endgroup::" - -export TEST_MSSQL_VERSION=2019-latest - -echo "::group::MSSQL ${TEST_MSSQL_VERSION}"; -docker pull mcr.microsoft.com/mssql/server:${TEST_MSSQL_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:mssql -echo "::endgroup::" diff --git a/.github/actions/integration/mysql-aurora-serverless.sh b/.github/actions/integration/mysql-aurora-serverless.sh deleted file mode 100755 index edd5c5161772c..0000000000000 --- a/.github/actions/integration/mysql-aurora-serverless.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_MYSQL_VERSION=5.6.50 -export TEST_LOCAL_DATA_API_VERSION=0.6.4 - -echo "::group::MySQL ${TEST_MYSQL_VERSION} Data Api ${TEST_LOCAL_DATA_API_VERSION}"; -docker pull mysql:${TEST_MYSQL_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:mysql-aurora-serverless -echo "::endgroup::" diff --git a/.github/actions/integration/mysql.sh b/.github/actions/integration/mysql.sh deleted file mode 100755 index 3baef5c280dc6..0000000000000 --- a/.github/actions/integration/mysql.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_MYSQL_VERSION=5.6 - -echo "::group::MySQL ${TEST_MYSQL_VERSION}"; -docker pull mysql:${TEST_MYSQL_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:mysql -echo "::endgroup::" - -export TEST_MYSQL_VERSION=5.7 - -echo "::group::MySQL ${TEST_MYSQL_VERSION}"; -docker pull mysql:${TEST_MYSQL_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:mysql -echo "::endgroup::" - -export TEST_MYSQL_VERSION=8.0.24 - -echo "::group::MySQL ${TEST_MYSQL_VERSION}"; -docker pull mysql:${TEST_MYSQL_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:mysql -echo "::endgroup::" diff --git 
a/.github/actions/integration/postgres.sh b/.github/actions/integration/postgres.sh deleted file mode 100755 index b44f927da5ddc..0000000000000 --- a/.github/actions/integration/postgres.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_PGSQL_VERSION=12 - -echo "::group::PostgreSQL ${TEST_PGSQL_VERSION}" -docker pull postgres:${TEST_PGSQL_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:postgres -echo "::endgroup::" diff --git a/.github/actions/integration/prestodb.sh b/.github/actions/integration/prestodb.sh deleted file mode 100755 index c57b5844d609d..0000000000000 --- a/.github/actions/integration/prestodb.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -export TEST_PRESTO_VERSION=341-SNAPSHOT -export TEST_PGSQL_VERSION=12.4 - -echo "::group::PrestoDB ${TEST_PRESTO_VERSION} with PostgreSQL ${TEST_PGSQL_VERSION}" -docker pull lewuathe/presto-coordinator:${TEST_PRESTO_VERSION} -docker pull lewuathe/presto-worker:${TEST_PRESTO_VERSION} -docker pull postgres:${TEST_PGSQL_VERSION} -yarn lerna run --concurrency 1 --stream --no-prefix integration:presto -echo "::endgroup::" diff --git a/.github/actions/integration/snowflake.sh b/.github/actions/integration/snowflake.sh deleted file mode 100755 index 368030bfc09a3..0000000000000 --- a/.github/actions/integration/snowflake.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -echo "::group::Snowflake [cloud]" -export CUBEJS_DB_NAME=DEMO_DB -export CUBEJS_DB_SNOWFLAKE_ACCOUNT=lxb31104 -export CUBEJS_DB_SNOWFLAKE_REGION=us-west-2 -export CUBEJS_DB_SNOWFLAKE_WAREHOUSE=COMPUTE_WH - -yarn lerna run --concurrency 1 --stream --no-prefix smoke:snowflake - -echo "::endgroup::" diff --git a/.github/actions/smoke.sh b/.github/actions/smoke.sh deleted file mode 100755 index 57eceb824b904..0000000000000 --- a/.github/actions/smoke.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash -set -eo pipefail - -# Debug log for test containers -export DEBUG=testcontainers - -echo "::group::Oracle" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:oracle -echo "::endgroup::" - -echo "::group::DuckDB" -# Should we create a separate job integration-duckdb? I believe not, because it works fast. 
-yarn lerna run --concurrency 1 --stream --no-prefix integration:duckdb -yarn lerna run --concurrency 1 --stream --no-prefix smoke:duckdb -echo "::endgroup::" - -echo "::group::Postgres" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:postgres -echo "::endgroup::" - -echo "::group::QuestDB" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:questdb -echo "::endgroup::" - -echo "::group::Crate" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:crate -echo "::endgroup::" - -echo "::group::Lambda" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:lambda -echo "::endgroup::" - -echo "::group::Materialize" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:materialize -echo "::endgroup::" - -echo "::group::Multidb" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:multidb -echo "::endgroup::" - -#echo "::group::Prestodb" -#docker rm -vf $(docker ps -aq) -#docker rmi -f $(docker images -aq) -#docker pull ahanaio/prestodb-sandbox:0.281 -#yarn lerna run --concurrency 1 --stream --no-prefix smoke:prestodb -#echo "::endgroup::" - -echo "::group::Trino" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:trino -echo "::endgroup::" - -echo "::group::MS SQL" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:mssql -echo "::endgroup::" - -echo "::group::MongoBI" -yarn lerna run --concurrency 1 --stream --no-prefix smoke:mongobi -echo "::endgroup::" \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml index f89a7a798cae0..2c17185601507 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -3,4 +3,4 @@ updates: - package-ecosystem: "npm" directory: "/packages" schedule: - interval: "weekly" + interval: "daily" diff --git a/.github/label-actions.yml b/.github/label-actions.yml deleted file mode 100644 index cb7792b1ce279..0000000000000 --- a/.github/label-actions.yml +++ /dev/null @@ -1,6 +0,0 @@ -"help wanted": - comment: | - If you are interested in working on this issue, please provide go ahead and provide PR for that. - We'd be happy to review it and merge it. - If this is the first time you are contributing a Pull Request to Cube, please check our [contribution guidelines](https://github.com/cube-js/cube/blob/master/CONTRIBUTING.md). - You can also post any questions while contributing in the #contributors channel in the [Cube Slack](https://slack.cube.dev/). 
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md deleted file mode 100644 index ff3368acb3db3..0000000000000 --- a/.github/pull_request_template.md +++ /dev/null @@ -1,13 +0,0 @@ -**Check List** -- [ ] Tests has been run in packages where changes made if available -- [ ] Linter has been run for changed code -- [ ] Tests for the changes have been added if not covered yet -- [ ] Docs have been added / updated if required - -**Issue Reference this PR resolves** - -[For example #12] - -**Description of Changes Made (if issue reference is not provided)** - -[Description goes here] diff --git a/.github/workflows/birdbox.yml b/.github/workflows/birdbox.yml deleted file mode 100644 index 6e9fb16e84623..0000000000000 --- a/.github/workflows/birdbox.yml +++ /dev/null @@ -1,228 +0,0 @@ -name: 'Bird: Release E2E tests 📦' -on: - workflow_dispatch: - inputs: - version: - description: 'Version' - required: true - default: 'latest' - workflow_run: - workflows: [ "Release" ] - types: - - completed - push: - paths: - - '.github/workflows/birdbox.yml' - branches: - - 'master' - -# touch to trigger -jobs: - cli: - runs-on: ubuntu-20.04 - timeout-minutes: 60 - strategy: - fail-fast: false - matrix: - os: [ubuntu-20.04, ubuntu-18.04, macos-11, windows-2019] - node-version: [16.x] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Disable rustup update (issue workaround for Windows) - run: rustup set auto-self-update disable - if: contains(runner.os, 'windows') - shell: bash - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - override: true - components: rustfmt - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Restore lerna - uses: actions/cache@v3 - with: - # npm cache files are stored in `~/.npm` on Linux/macOS - path: | - ~/.npm - node_modules - rust/cubestore/node_modules - packages/*/node_modules - key: ${{ runner.os }}-workspace-main-16.x-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-workspace-main-16.x- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Build client - run: yarn build - - name: Lerna tsc - run: yarn tsc - - name: Birdbox - env: - CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} - run: | - cd packages/cubejs-testing/ - npx cubejs-cli@${{ github.event.inputs.version }} create birdbox-test-project -d postgres - export BIRDBOX_CUBEJS_VERSION=${{ github.event.inputs.version }}${{ matrix.node-arch }} - yarn run dataset:minimal - yarn run birdbox:cli:postgresql - - name: Slack Failure - if: failure() - uses: voxmedia/github-action-slack-notify-build@v1 - with: - channel: oss-ci - status: FAILED - color: danger - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - - docker: - runs-on: ubuntu-20.04 - timeout-minutes: 60 - strategy: - fail-fast: false - matrix: - target: ['postgresql', 'postgresql-cubestore', 'postgresql-pre-aggregations'] - arch: ['', '-alpine'] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - override: true - components: rustfmt - - name: Install Node.js 16.x - uses: actions/setup-node@v3 - with: - 
node-version: 16.x - - name: Restore lerna - uses: actions/cache@v3 - with: - # npm cache files are stored in `~/.npm` on Linux/macOS - path: | - ~/.npm - node_modules - rust/cubestore/node_modules - packages/*/node_modules - key: ${{ runner.os }}-workspace-main-16.x-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-workspace-main-16.x- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Build client - run: yarn build - - name: Lerna tsc - run: yarn tsc - - name: Birdbox - run: | - cd packages/cubejs-testing/ - export DEBUG=testcontainers - export BIRDBOX_CUBEJS_VERSION=${{ github.event.inputs.version }}${{ matrix.node-arch }} - yarn run dataset:minimal - yarn run birdbox:${{ matrix.target }} - - name: Slack Failure - if: failure() - uses: voxmedia/github-action-slack-notify-build@v1 - with: - channel: oss-ci - status: FAILED - color: danger - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - - cypress: - runs-on: ${{ matrix.os }} - timeout-minutes: 60 - strategy: - fail-fast: false - matrix: - browser: [chrome] - include: - - browser: chrome - os: ubuntu-20.04 - container: cypress/browsers:node12.18.3-chrome89-ff86 - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - override: true - components: rustfmt - - name: Install Node.js 16.x - uses: actions/setup-node@v3 - with: - node-version: 16.x - - name: Restore lerna - uses: actions/cache@v3 - with: - # npm cache files are stored in `~/.npm` on Linux/macOS - path: | - ~/.npm - node_modules - rust/cubestore/node_modules - packages/*/node_modules - key: ${{ runner.os }}-workspace-main-16.x-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-workspace-main-16.x- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Lerna tsc - run: yarn tsc - - name: Birdbox - env: - CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} - run: | - cd packages/cubejs-testing/ - export DEBUG=testcontainers - export BIRDBOX_CYPRESS_BROWSER=${{ matrix.browser }} - export BIRDBOX_CUBEJS_VERSION=${{ github.event.inputs.version }} - yarn run cypress:install - yarn run dataset:minimal - yarn run cypress:birdbox - - name: Slack Failure - if: failure() - uses: voxmedia/github-action-slack-notify-build@v1 - with: - channel: oss-ci - status: FAILED - color: danger - env: - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} diff --git a/.github/workflows/cloud.yml b/.github/workflows/cloud.yml deleted file mode 100644 index 12db29cddb85c..0000000000000 --- a/.github/workflows/cloud.yml +++ /dev/null @@ -1,106 +0,0 @@ -# CAUTION: This workflow exposes secrets. It is only supposed to be run on "merge into master" condition. 
-name: 'Cloud Tests' -on: - # push doesn't work for PRs, it's safe to use it - # only maintainers/core contributors are allowed to push directly to the main repository - push: - paths: - - '.github/actions/integration/athena.sh' - - '.github/actions/integration/bigquery.sh' - - '.github/actions/integration/snowflake.sh' - - '.github/workflows/cloud.yml' - - 'packages/cubejs-testing/**' - - 'packages/cubejs-testing-shared/**' - - 'packages/cubejs-backend-shared/**' - - 'packages/cubejs-base-driver/**' - - 'packages/cubejs-query-orchestrator/**' - - 'packages/cubejs-schema-compiler/**' - # drivers - - 'packages/cubejs-athena-driver/**' - - 'packages/cubejs-bigquery-driver/**' - - 'packages/cubejs-snowflake-driver/**' - - 'lerna.json' - - 'package.json' - - 'yarn.lock' - -jobs: - latest-tag-sha: - runs-on: ubuntu-20.04 - outputs: - sha: ${{ steps.get-tag.outputs.sha }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - id: git-log - run: git log HEAD~30..HEAD - - id: get-tag-test - run: echo "$SHA $(git rev-list -n 1 $(git tag --contains $SHA))" - env: - SHA: ${{ github.sha }} - - id: get-tag - run: echo "::set-output name=sha::$(git rev-list -n 1 $(git tag --contains $SHA))" - env: - SHA: ${{ github.sha }} - - id: get-tag-out - run: echo "$OUT" - env: - OUT: ${{ steps.get-tag.outputs.sha }} - - integration-cloud: - needs: latest-tag-sha - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - runs-on: ubuntu-20.04 - timeout-minutes: 60 - - strategy: - matrix: - node-version: [ 16.x ] - db: [ 'athena', 'bigquery', 'snowflake' ] - fail-fast: false - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Disable rustup update (issue workaround for Windows) - run: rustup set auto-self-update disable - if: contains(runner.os, 'windows') - shell: bash - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - override: true - components: rustfmt - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - run: CUBESTORE_SKIP_POST_INSTALL=true yarn install --frozen-lockfile - - name: Build client - run: yarn build - - name: Lerna tsc - run: yarn tsc - - name: Run Integration tests for ${{ matrix.db }} matrix - timeout-minutes: 30 - env: - CUBEJS_DB_BQ_CREDENTIALS: ${{ secrets.CUBEJS_DB_BQ_CREDENTIALS }} - CUBEJS_AWS_KEY: ${{ secrets.CUBEJS_AWS_KEY }} - CUBEJS_AWS_SECRET: ${{ secrets.CUBEJS_AWS_SECRET }} - CUBEJS_DB_USER: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_USER }} - CUBEJS_DB_PASS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_PASS }} - run: ./.github/actions/integration/${{ matrix.db }}.sh diff --git a/.github/workflows/drivers-tests.yml b/.github/workflows/drivers-tests.yml deleted file mode 100644 index 7b6b1d5aed909..0000000000000 --- a/.github/workflows/drivers-tests.yml +++ /dev/null @@ -1,264 +0,0 @@ -name: 'Drivers tests' - -on: - push: - branches: - - 'master' - paths: - - '.github/workflows/drivers-tests.yml' - - - 'packages/cubejs-testing-drivers/**' - - 'packages/cubejs-testing-shared/**' - - 
'packages/cubejs-query-orchestrator/src/**' - - 'packages/cubejs-base-driver/src/**' - - 'packages/cubejs-jdbc-driver/src/**' - - - 'packages/cubejs-athena-driver/**' - - 'packages/cubejs-clickhouse-driver/**' - - 'packages/cubejs-databricks-jdbc-driver/**' - - 'packages/cubejs-mssql-driver/**' - - 'packages/cubejs-mysql-driver/**' - - 'packages/cubejs-postgres-driver/**' - - 'packages/cubejs-snowflake-driver/**' - pull_request: - paths: - - '.github/workflows/drivers-tests.yml' - - - 'packages/cubejs-testing-drivers/**' - - 'packages/cubejs-testing-shared/**' - - 'packages/cubejs-query-orchestrator/src/**' - - 'packages/cubejs-base-driver/src/**' - - 'packages/cubejs-jdbc-driver/src/**' - - - 'packages/cubejs-athena-driver/**' - - 'packages/cubejs-clickhouse-driver/**' - - 'packages/cubejs-databricks-jdbc-driver/**' - - 'packages/cubejs-mssql-driver/**' - - 'packages/cubejs-mysql-driver/**' - - 'packages/cubejs-postgres-driver/**' - - 'packages/cubejs-snowflake-driver/**' - -jobs: - latest-tag-sha: - runs-on: ubuntu-20.04 - outputs: - sha: ${{ steps.get-tag.outputs.sha }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - id: git-log - run: git log HEAD~30..HEAD - - id: get-tag-test - run: echo "$SHA $(git rev-list -n 1 $(git tag --contains $SHA))" - env: - SHA: ${{ github.sha }} - - id: get-tag - run: echo "::set-output name=sha::$(git rev-list -n 1 $(git tag --contains $SHA))" - env: - SHA: ${{ github.sha }} - - id: get-tag-out - run: echo "$OUT" - env: - OUT: ${{ steps.get-tag.outputs.sha }} - - native_linux: - runs-on: ubuntu-20.04 - timeout-minutes: 60 - name: Build native Linux ${{ matrix.node-version }} ${{ matrix.target }} Python ${{ matrix.python-version }} - strategy: - matrix: - node-version: [ 16 ] - python-version: [ "fallback" ] - target: [ "x86_64-unknown-linux-gnu" ] - fail-fast: false - container: - image: cubejs/rust-cross:${{ matrix.target }}-30052023 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - target: ${{ matrix.target }} - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Install Yarn - run: npm install -g yarn - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Copy yarn.lock file - run: cp yarn.lock packages/cubejs-docker - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Compile TypeScript - run: yarn tsc - - name: Build native (fallback) - if: (matrix.python-version == 'fallback') - env: - CARGO_BUILD_TARGET: ${{ matrix.target }} - run: cd packages/cubejs-backend-native && npm run native:build-release - - name: Setup cross compilation - if: (matrix.target == 'aarch64-unknown-linux-gnu') - uses: allenevans/set-env@v3.0.0 - with: - PYO3_CROSS_PYTHON_VERSION: ${{ matrix.python-version }} - - name: Build native (with Python) - if: (matrix.python-version 
!= 'fallback') - env: - PYO3_PYTHON: python${{ matrix.python-version }} - CARGO_BUILD_TARGET: ${{ matrix.target }} - run: cd packages/cubejs-backend-native && npm run native:build-release-python - - name: Upload native build - uses: actions/upload-artifact@v4 - with: - name: backend-native - path: packages/cubejs-backend-native/index.node - - build: - needs: [latest-tag-sha, native_linux] - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - runs-on: ubuntu-20.04 - timeout-minutes: 30 - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - # Building docker - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - - name: Download native build - uses: actions/download-artifact@v4 - with: - name: backend-native - path: packages/cubejs-backend-native/ - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - version: v0.10.3 - - - name: Build and push - uses: docker/build-push-action@v4 - with: - context: . - file: ./packages/cubejs-docker/testing-drivers.Dockerfile - tags: cubejs/cube:testing-drivers - push: true - - tests: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - needs: [latest-tag-sha, build] - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - strategy: - matrix: - node: - - 16.x - database: - - athena - - bigquery - - clickhouse - - databricks-jdbc - - databricks-jdbc-export-bucket - - mssql - - mysql - - postgres - - snowflake - fail-fast: false - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - override: true - components: rustfmt - - - name: Install Node.js 16.x - uses: actions/setup-node@v3 - with: - node-version: 16.x - - - name: Configure `yarn` - run: yarn policies set-version v1.22.19 - - - name: Install dependencies - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - - name: Build client - run: yarn build - - - name: Build packages - run: yarn tsc - - - name: Build tests - run: | - cd packages/cubejs-testing-drivers - yarn tsc - - - name: Run tests - env: - # Athena - DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY: ${{ secrets.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_KEY }} - DRIVERS_TESTS_ATHENA_CUBEJS_AWS_SECRET: ${{ secrets.DRIVERS_TESTS_ATHENA_CUBEJS_AWS_SECRET }} - - # BigQuery - DRIVERS_TESTS_CUBEJS_DB_BQ_CREDENTIALS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_BQ_CREDENTIALS }} - - # Databricks - DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_URL: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_URL }} - DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_TOKEN: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_DATABRICKS_TOKEN }} - DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_KEY }} - DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET }} - - # Snowflake - DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_USER: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_USER }} - DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_PASS: ${{ secrets.DRIVERS_TESTS_CUBEJS_DB_SNOWFLAKE_PASS }} - run: | - cd ./packages/cubejs-testing-drivers - export DEBUG=testcontainers - yarn ${{ matrix.database }}-full \ No newline at end of file diff --git a/.github/workflows/examples-publish.yml 
b/.github/workflows/examples-publish.yml deleted file mode 100644 index 2ed256618ee7e..0000000000000 --- a/.github/workflows/examples-publish.yml +++ /dev/null @@ -1,700 +0,0 @@ -name: Examples - -on: - push: - paths: - - '.github/workflows/examples-publish.yml' - - '.github/actions/deploy-example.sh' - - 'examples/**' - branches: - - master - -jobs: - - clickhouse-multi-tenancy: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/building-an-open-source-data-stack-with-clickhouse-and-cube-workshop/*,examples/building-an-open-source-data-stack-with-clickhouse-and-cube-workshop/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: building-an-open-source-data-stack-with-clickhouse-and-cube-workshop - EXAMPLE_FRONTEND_SUBDIRECTORY: dashboard-app-multi-tenancy - EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY: dist/ - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - EXAMPLE_CUBE_SKIP: 1 - - clickhouse-multiple-sources: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/building-an-open-source-data-stack-with-clickhouse-and-cube-workshop/*,examples/building-an-open-source-data-stack-with-clickhouse-and-cube-workshop/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: building-an-open-source-data-stack-with-clickhouse-and-cube-workshop - EXAMPLE_FRONTEND_SUBDIRECTORY: dashboard-app-multiple-data-sources - EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY: dist/ - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - EXAMPLE_CUBE_SKIP: 1 - - apollo-federation: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/apollo-federation-with-cube/*,examples/apollo-federation-with-cube/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: apollo-federation-with-cube - EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY: dist/ - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - EXAMPLE_CUBE_SKIP: 1 - - ksql: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/ksql/*,examples/ksql/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: ksql - EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY: build/ - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - EXAMPLE_CUBE_SKIP: 1 - - hasura-remote-schema: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: 
actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/hasura-remote-schema-with-cube/*,examples/hasura-remote-schema-with-cube/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: hasura-remote-schema-with-cube - EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY: dist/ - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - EXAMPLE_CUBE_SKIP: 1 - - angular-dashboard: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/angular-dashboard-with-material-ui/*,examples/angular-dashboard-with-material-ui/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: angular-dashboard-with-material-ui - EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY: dist/dashboard-app - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_ANGULAR_DASHBOARD }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - compare-date-range: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/compare-date-range/*,examples/compare-date-range/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: compare-date-range - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_DATA_RANGE }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - clickhouse-dashboard: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/clickhouse-dashboard/*,examples/clickhouse-dashboard/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: clickhouse-dashboard - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_CLICKHOUSE_DASHBOARD }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - d3-dashboard: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/d3-dashboard/*,examples/d3-dashboard/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: d3-dashboard - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_D3_DASHBOARD }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - data-blending: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: 
pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/data-blending/*,examples/data-blending/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: data-blending - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_DATA_BLENDING }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - drill-downs: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/drill-downs/*,examples/drill-downs/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: drill-downs - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_DRILL_DOWNS }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - ecom-backend: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/ecom-backend/*,examples/ecom-backend/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: ecom-backend - EXAMPLE_FRONTEND_SKIP: 1 - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_ECOM_BACKEND }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - external-rollups: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/external-rollups/*,examples/external-rollups/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: external-rollups - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_EXTERNAL_ROLLUPS }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - hacktoberfest: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/hacktoberfest/*,examples/hacktoberfest/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: hacktoberfest - EXAMPLE_FRONTEND_SUBDIRECTORY: dashboard - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_HACKTOBERFEST }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - mapbox: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/mapbox/*,examples/mapbox/**' - - - if: 
steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: mapbox - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_MAPBOX }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - react-dashboard: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/react-dashboard/*,examples/react-dashboard/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: react-dashboard - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_REACT_DASHBOARD }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - react-muze: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/react-muze/*,examples/react-muze/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: react-muze - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_REACT_MUZE }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - real-time-dashboard: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/real-time-dashboard/*,examples/real-time-dashboard/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: real-time-dashboard - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_REAL_TIME_DASHBOARD }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - web-analytics: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/web-analytics/*,examples/web-analytics/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: web-analytics - CUBE_CLOUD_DEPLOY_AUTH: ${{ secrets.CUBE_CLOUD_DEPLOY_AUTH_WEB_ANALYTICS }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - auth0: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/auth0/*,examples/auth0/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: auth0 - CUBE_CLOUD_DEPLOY_AUTH: ${{ 
secrets.CUBE_CLOUD_DEPLOY_AUTH_AUTH0 }} - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - bigquery-public-datasets: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/bigquery-public-datasets/*,examples/bigquery-public-datasets/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: bigquery-public-datasets - EXAMPLE_CUBE_SKIP: 1 - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - google-charts-moma: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/google-charts-moma/*,examples/google-charts-moma/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: google-charts-moma - EXAMPLE_CUBE_SKIP: 1 - EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY: public/ - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - deepnote: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/deepnote/*,examples/deepnote/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: deepnote - EXAMPLE_CUBE_SKIP: 1 - EXAMPLE_FRONTEND_BUILD_SUBDIRECTORY: dist/ - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - EXAMPLE_FRONTEND_SUBDIRECTORY: './' - - graphql-api-metrics-dashboard: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/graphql-api-metrics-dashboard/*,examples/graphql-api-metrics-dashboard/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: graphql-api-metrics-dashboard - EXAMPLE_CUBE_SKIP: 1 - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - EXAMPLE_FRONTEND_SUBDIRECTORY: './' - - multi-tenant-analytics: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/multi-tenant-analytics/*,examples/multi-tenant-analytics/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: multi-tenant-analytics - EXAMPLE_CUBE_SKIP: 1 - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - multitenancy-workshop: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - 
uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/multitenancy-workshop/*,examples/multitenancy-workshop/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: multitenancy-workshop - EXAMPLE_CUBE_SKIP: 1 - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - - aws-web-analytics: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/aws-web-analytics/*,examples/aws-web-analytics/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: aws-web-analytics - EXAMPLE_CUBE_SKIP: 1 - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - EXAMPLE_FRONTEND_SUBDIRECTORY: 'analytics-dashboard' - - event-analytics: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v4 - - - uses: pheel/path-watcher-action@v1 - id: modified - with: - paths: '.github/workflows/examples-publish.yml,.github/actions/deploy-example.sh,examples/event-analytics/*,examples/event-analytics/**' - - - if: steps.modified.outputs.modified - uses: actions/setup-node@v3 - with: - node-version: 14.x - - - if: steps.modified.outputs.modified - run: .github/actions/deploy-example.sh - env: - EXAMPLE_SLUG: event-analytics - EXAMPLE_CUBE_SKIP: 1 - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - EXAMPLE_FRONTEND_SUBDIRECTORY: 'frontend' diff --git a/.github/workflows/issue-labeler.yml b/.github/workflows/issue-labeler.yml deleted file mode 100644 index d49296bab5c60..0000000000000 --- a/.github/workflows/issue-labeler.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: Issue Labeler -on: - issues: - types: [labeled] - -jobs: - main: - name: Process Label Action - runs-on: ubuntu-20.04 - steps: - - uses: actions/checkout@v4 - - name: Process Label Action - uses: hramos/label-actions@v1 - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml deleted file mode 100644 index 1c2c5cadde3a6..0000000000000 --- a/.github/workflows/master.yml +++ /dev/null @@ -1,73 +0,0 @@ -name: Master -on: - push: - paths: - - '.github/workflows/push.yml' - - '.github/workflows/master.yml' - - 'packages/**' - - '.eslintrc.js' - - '.prettierrc' - - 'lerna.json' - - 'package.json' - - 'rollup.config.js' - - 'yarn.lock' - branches: - - master -jobs: - latest-tag-sha: - runs-on: ubuntu-20.04 - outputs: - sha: ${{ steps.get-tag.outputs.sha }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - id: git-log - run: git log HEAD~30..HEAD - - id: get-tag-test - run: echo "$SHA $(git rev-list -n 1 $(git tag --contains $SHA))" - env: - SHA: ${{ github.sha }} - - id: get-tag - run: echo "::set-output name=sha::$(git rev-list -n 1 $(git tag --contains $SHA))" - env: - SHA: ${{ github.sha }} - - id: get-tag-out - run: echo "$OUT" - env: - OUT: ${{ steps.get-tag.outputs.sha }} - - docker-image-dev: - name: Release :dev image - runs-on: ubuntu-20.04 - needs: latest-tag-sha - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - steps: - - name: Check out the repo - uses: 
actions/checkout@v4 - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - version: v0.9.1 - - name: Push to Docker Hub - uses: docker/build-push-action@v3 - with: - context: ./ - file: ./packages/cubejs-docker/dev.Dockerfile - platforms: linux/amd64 - push: true - tags: cubejs/cube:dev - - name: Update repo description - uses: peter-evans/dockerhub-description@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - repository: cubejs/cube - readme-filepath: ./packages/cubejs-docker/README.md diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml deleted file mode 100644 index 84b591b7246af..0000000000000 --- a/.github/workflows/pr.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Pull Requests -on: - pull_request_target: - types: [opened] - pull_request: - types: [opened] - -jobs: - authorDetector: - name: Detect PR's author - runs-on: ubuntu-20.04 - steps: - - name: Checkout Actions - uses: actions/checkout@v4 - with: - repository: 'cube-js/github-actions' - path: ./actions - ref: v5 - - name: Install Actions - run: npm install --production --prefix ./actions - - name: Detect author - uses: ./actions/author-detector - with: - token: ${{secrets.GITHUB_TOKEN}} - addCoreLabel: false - communityLabel: pr:community diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml deleted file mode 100644 index 4f9dae931e0a0..0000000000000 --- a/.github/workflows/publish.yml +++ /dev/null @@ -1,895 +0,0 @@ -name: Release - -on: - push: - tags: - - 'v*.*.*' - - 'v*.*.*-*' - -jobs: - npm: - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - - name: Install Node.js 16.x - uses: actions/setup-node@v3 - with: - node-version: 16.x - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Copy yarn.lock file - run: cp yarn.lock packages/cubejs-docker - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Build Core Client libraries - run: yarn build - - name: Build other packages - run: yarn lerna run --concurrency 1 build - env: - NODE_OPTIONS: --max_old_space_size=4096 - - name: Set NPM token - run: echo //registry.npmjs.org/:_authToken=$NPM_TOKEN > ~/.npmrc - env: - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} - - name: NPM publish - run: ./node_modules/.bin/lerna publish from-package --yes - - native_linux: - runs-on: ubuntu-20.04 - timeout-minutes: 60 - name: Build native Linux ${{ matrix.node-version }} ${{ matrix.target }} Python ${{ matrix.python-version }} - strategy: - matrix: - node-version: [16] - python-version: ["3.9", "3.10", "3.11", "3.12", "fallback"] - target: 
["x86_64-unknown-linux-gnu", "aarch64-unknown-linux-gnu"] - include: - - target: x86_64-unknown-linux-gnu - package_target_arch: x64 - package_target_platform: linux - package_target_libc: glibc - - target: aarch64-unknown-linux-gnu - package_target_arch: arm64 - package_target_platform: linux - package_target_libc: glibc - fail-fast: false - container: - image: cubejs/rust-cross:${{ matrix.target }}-30052023 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - target: ${{ matrix.target }} - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Install Yarn - run: npm install -g yarn - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Copy yarn.lock file - run: cp yarn.lock packages/cubejs-docker - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Compile TypeScript - run: yarn tsc - - name: Build native (fallback) - if: (matrix.python-version == 'fallback') - env: - CARGO_BUILD_TARGET: ${{ matrix.target }} - run: cd packages/cubejs-backend-native && npm run native:build-release - - name: Setup cross compilation - if: (matrix.target == 'aarch64-unknown-linux-gnu') - uses: allenevans/set-env@v3.0.0 - with: - PYO3_CROSS_PYTHON_VERSION: ${{ matrix.python-version }} - - name: Build native (with Python) - if: (matrix.python-version != 'fallback') - env: - PYO3_PYTHON: python${{ matrix.python-version }} - CARGO_BUILD_TARGET: ${{ matrix.target }} - run: cd packages/cubejs-backend-native && npm run native:build-release-python - - name: Archive release asset - shell: bash - run: | - cd packages/cubejs-backend-native - rm -rf native - mkdir native - cp index.node native/index.node - tar czvf native.tar.gz native - - name: Upload to Release - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: packages/cubejs-backend-native/native.tar.gz - # Example: native-linux-arm64-glibc.tar.gz - asset_name: native-${{ matrix.package_target_platform }}-${{ matrix.package_target_arch }}-${{ matrix.package_target_libc }}-${{ matrix.python-version }}.tar.gz - tag: ${{ github.ref }} - overwrite: true - - native_macos: - runs-on: ${{ matrix.os-version }} - timeout-minutes: 90 - name: Build ${{ matrix.os-version }} ${{ matrix.target }} ${{ matrix.node-version }} Python ${{ matrix.python-version }} - strategy: - matrix: - node-version: [16.x] - os-version: ["macos-11"] - target: ["x86_64-apple-darwin", "aarch64-apple-darwin"] - python-version: ["3.9", "3.10", "3.11", "3.12", "fallback"] - include: - - target: x86_64-apple-darwin - package_target_arch: x64 - package_target_platform: darwin - package_target_libc: unknown - tar_executable: gtar - - target: aarch64-apple-darwin - package_target_arch: arm64 - package_target_platform: darwin - package_target_libc: unknown - tar_executable: gtar - 
exclude: - # Disable python builds for aarch64 (not ready) - - target: "aarch64-apple-darwin" - python-version: "3.9" - - target: "aarch64-apple-darwin" - python-version: "3.10" - - target: "aarch64-apple-darwin" - python-version: "3.11" - - target: "aarch64-apple-darwin" - python-version: "3.12" - fail-fast: false - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - target: ${{ matrix.target }} - - name: Install Python - uses: actions/setup-python@v4 - if: (matrix.python-version != 'fallback') - with: - python-version: ${{ matrix.python-version }} - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Copy yarn.lock file - run: cp yarn.lock packages/cubejs-docker - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Compile TypeScript - run: yarn tsc - - name: Build native (fallback) - if: (matrix.python-version == 'fallback') - run: cd packages/cubejs-backend-native && npm run native:build-release - env: - CARGO_BUILD_TARGET: ${{ matrix.target }} - - name: Build native (with Python) - if: (matrix.python-version != 'fallback') - env: - PYO3_PYTHON: python${{ matrix.python-version }} - CARGO_BUILD_TARGET: ${{ matrix.target }} - run: cd packages/cubejs-backend-native && npm run native:build-release-python - - name: Archive release asset - shell: bash - run: | - cd packages/cubejs-backend-native - rm -rf native - mkdir native - cp index.node native/index.node - ${{ matrix.tar_executable }} -czvf native.tar.gz native - - name: Upload to Release - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: packages/cubejs-backend-native/native.tar.gz - # Example: native-linux-arm64-glibc.tar.gz - asset_name: native-${{ matrix.package_target_platform }}-${{ matrix.package_target_arch }}-${{ matrix.package_target_libc }}-${{ matrix.python-version }}.tar.gz - tag: ${{ github.ref }} - overwrite: true - - native_windows: - runs-on: ${{ matrix.os-version }} - timeout-minutes: 90 - name: Build ${{ matrix.os-version }} ${{ matrix.node-version }} Python ${{ matrix.python-version }} - strategy: - matrix: - node-version: [16.x] - python-version: ["fallback"] - os-version: [windows-2019] - include: - - os-version: windows-2019 - package_target_arch: x64 - package_target_platform: win32 - package_target_libc: unknown - tar_executable: tar - fail-fast: false - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Disable rustup update (issue workaround for Windows) - run: rustup set auto-self-update disable - shell: bash - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - - name: Install Python - uses: actions/setup-python@v4 - if: 
(matrix.python-version != 'fallback') - with: - python-version: ${{ matrix.python-version }} - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Copy yarn.lock file - run: cp yarn.lock packages/cubejs-docker - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Compile TypeScript - run: yarn tsc - - name: Build native (fallback) - if: (matrix.python-version == 'fallback') - run: cd packages/cubejs-backend-native && npm run native:build-release - - name: Build native (with Python) - if: (matrix.python-version != 'fallback') - env: - PYO3_PYTHON: python${{ matrix.python-version }} - run: cd packages/cubejs-backend-native && npm run native:build-release-python - - name: Archive release asset - shell: bash - run: | - cd packages/cubejs-backend-native - rm -rf native - mkdir native - cp index.node native/index.node - ${{ matrix.tar_executable }} -czvf native.tar.gz native - - name: Upload to Release - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: packages/cubejs-backend-native/native.tar.gz - # Example: native-linux-arm64-glibc.tar.gz - asset_name: native-${{ matrix.package_target_platform }}-${{ matrix.package_target_arch }}-${{ matrix.package_target_libc }}-${{ matrix.python-version }}.tar.gz - tag: ${{ github.ref }} - overwrite: true - - docker-default: - needs: [npm, cubestore_linux, native_linux] - name: Debian docker image - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - include: - - os: ubuntu-22.04 - target: x86_64-unknown-linux-gnu - platforms: linux/amd64,linux/arm64 - timeout-minutes: 90 - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - name: Repo metadata - id: repo - uses: actions/github-script@v3 - with: - script: | - const { data } = await github.repos.get(context.repo) - const reg = new RegExp('📊 ', 'ug'); - data.description = data.description.replace(reg, "") - return data - - name: Prepare - id: prep - run: | - DOCKER_IMAGE=cubejs/cube - VERSION=noop - if [ "${{ github.event_name }}" = "schedule" ]; then - VERSION=nightly - elif [[ $GITHUB_REF == refs/tags/* ]]; then - VERSION=${GITHUB_REF#refs/tags/} - fi - TAGS="${DOCKER_IMAGE}:${VERSION}" - if [[ $VERSION =~ ^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then - MINOR=${VERSION%.*} - MAJOR=${MINOR%.*} - TAGS="$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR},${DOCKER_IMAGE}:latest" - fi - echo ::set-output name=version::${VERSION} - echo ::set-output name=tags::${TAGS} - echo ::set-output name=created::$(date -u +'%Y-%m-%dT%H:%M:%SZ') - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - version: v0.10.3 - - 
name: Copy yarn.lock file - run: cp yarn.lock packages/cubejs-docker - - name: Push to Docker Hub - uses: docker/build-push-action@v3 - with: - context: ./packages/cubejs-docker - file: ./packages/cubejs-docker/latest.Dockerfile - platforms: ${{ matrix.platforms }} - push: true - tags: ${{ steps.prep.outputs.tags }} - labels: | - org.opencontainers.image.title=${{ fromJson(steps.repo.outputs.result).name }} - org.opencontainers.image.description=${{ fromJson(steps.repo.outputs.result).description }} - org.opencontainers.image.url=${{ fromJson(steps.repo.outputs.result).html_url }} - org.opencontainers.image.source=${{ fromJson(steps.repo.outputs.result).clone_url }} - org.opencontainers.image.version=${{ steps.prep.outputs.version }} - org.opencontainers.image.created=${{ steps.prep.outputs.created }} - org.opencontainers.image.revision=${{ github.sha }} - org.opencontainers.image.licenses=${{ fromJson(steps.repo.outputs.result).license.spdx_id }} - # Workaround for yarn v1, it uses aggressive timeouts with summing time spending on fs, https://github.com/yarnpkg/yarn/issues/4890 - config-inline: | - [worker.oci] - max-parallelism = 1 - build-args: | - IMAGE_VERSION=${{ steps.prep.outputs.version }} - - docker-alpine: - needs: [npm, cubestore_linux, native_linux] - name: Alpine docker image - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - name: Repo metadata - id: repo - uses: actions/github-script@v3 - with: - script: | - const { data } = await github.repos.get(context.repo) - const reg = new RegExp('📊 ', 'ug'); - data.description = data.description.replace(reg, "") - return data - - name: Prepare - id: prep - run: | - DOCKER_IMAGE=cubejs/cube - VERSION=noop - if [ "${{ github.event_name }}" = "schedule" ]; then - VERSION=nightly - elif [[ $GITHUB_REF == refs/tags/* ]]; then - VERSION=${GITHUB_REF#refs/tags/} - fi - TAGS="${DOCKER_IMAGE}:${VERSION}-alpine" - if [[ $VERSION =~ ^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then - MINOR=${VERSION%.*} - MAJOR=${MINOR%.*} - TAGS="$TAGS,${DOCKER_IMAGE}:${MINOR}-alpine,${DOCKER_IMAGE}:${MAJOR}-alpine,${DOCKER_IMAGE}:alpine" - elif [ "${{ github.event_name }}" = "push" ]; then - TAGS="$TAGS,${DOCKER_IMAGE}:sha-${GITHUB_SHA::8}-alpine" - fi - echo ::set-output name=version::${VERSION} - echo ::set-output name=tags::${TAGS} - echo ::set-output name=created::$(date -u +'%Y-%m-%dT%H:%M:%SZ') - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - version: v0.9.1 - - name: Copy yarn.lock file - run: cp yarn.lock packages/cubejs-docker - - name: Push to Docker Hub - uses: docker/build-push-action@v3 - with: - context: ./packages/cubejs-docker - file: ./packages/cubejs-docker/latest-alpine.Dockerfile - platforms: linux/amd64 - push: true - tags: ${{ steps.prep.outputs.tags }} - labels: | - org.opencontainers.image.title=${{ fromJson(steps.repo.outputs.result).name }} - org.opencontainers.image.description=${{ fromJson(steps.repo.outputs.result).description }} - org.opencontainers.image.url=${{ fromJson(steps.repo.outputs.result).html_url }} - org.opencontainers.image.source=${{ fromJson(steps.repo.outputs.result).clone_url }} - org.opencontainers.image.version=${{ steps.prep.outputs.version }} - org.opencontainers.image.created=${{ 
steps.prep.outputs.created }} - org.opencontainers.image.revision=${{ github.sha }} - org.opencontainers.image.licenses=${{ fromJson(steps.repo.outputs.result).license.spdx_id }} - build-args: | - IMAGE_VERSION=${{ steps.prep.outputs.version }} - - docker-debian-jdk: - needs: [npm, cubestore_linux, native_linux] - name: Debian with jdk docker image - runs-on: ubuntu-20.04 - timeout-minutes: 30 - - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - name: Repo metadata - id: repo - uses: actions/github-script@v3 - with: - script: | - const { data } = await github.repos.get(context.repo) - const reg = new RegExp('📊 ', 'ug'); - data.description = data.description.replace(reg, "") - return data - - name: Prepare - id: prep - run: | - DOCKER_IMAGE=cubejs/cube - VERSION=noop - if [ "${{ github.event_name }}" = "schedule" ]; then - VERSION=nightly - elif [[ $GITHUB_REF == refs/tags/* ]]; then - VERSION=${GITHUB_REF#refs/tags/} - fi - TAGS="${DOCKER_IMAGE}:${VERSION}-jdk" - if [[ $VERSION =~ ^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then - MINOR=${VERSION%.*} - MAJOR=${MINOR%.*} - TAGS="$TAGS,${DOCKER_IMAGE}:${MINOR}-jdk,${DOCKER_IMAGE}:${MAJOR}-jdk,${DOCKER_IMAGE}:jdk" - elif [ "${{ github.event_name }}" = "push" ]; then - TAGS="$TAGS,${DOCKER_IMAGE}:sha-${GITHUB_SHA::8}-jdk" - fi - echo ::set-output name=version::${VERSION} - echo ::set-output name=tags::${TAGS} - echo ::set-output name=created::$(date -u +'%Y-%m-%dT%H:%M:%SZ') - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - version: v0.9.1 - - name: Copy yarn.lock file - run: cp yarn.lock packages/cubejs-docker - - name: Push to Docker Hub - uses: docker/build-push-action@v3 - with: - context: ./packages/cubejs-docker - file: ./packages/cubejs-docker/latest-debian-jdk.Dockerfile - platforms: linux/amd64 - push: true - tags: ${{ steps.prep.outputs.tags }} - labels: | - org.opencontainers.image.title=${{ fromJson(steps.repo.outputs.result).name }} - org.opencontainers.image.description=${{ fromJson(steps.repo.outputs.result).description }} - org.opencontainers.image.url=${{ fromJson(steps.repo.outputs.result).html_url }} - org.opencontainers.image.source=${{ fromJson(steps.repo.outputs.result).clone_url }} - org.opencontainers.image.version=${{ steps.prep.outputs.version }} - org.opencontainers.image.created=${{ steps.prep.outputs.created }} - org.opencontainers.image.revision=${{ github.sha }} - org.opencontainers.image.licenses=${{ fromJson(steps.repo.outputs.result).license.spdx_id }} - build-args: | - IMAGE_VERSION=${{ steps.prep.outputs.version }} - - docker-cubestore: - name: Cube Store Docker - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - include: - - os: ubuntu-20.04 - target: x86_64-unknown-linux-gnu - platforms: linux/amd64 - build-args: WITH_AVX2=1 - postfix: "" - tag: "latest" - - os: self-hosted - target: aarch64-unknown-linux-gnu - platforms: linux/arm64 - build-args: WITH_AVX2=0 - postfix: "-arm64v8" - tag: "arm64v8" - # Non AVX build - - os: ubuntu-20.04 - target: x86_64-unknown-linux-gnu - platforms: linux/amd64 - build-args: WITH_AVX2=0 - postfix: "-non-avx" - tag: "non-avx" - timeout-minutes: 60 - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - name: Repo metadata - id: repo - uses: actions/github-script@v3 - with: - script: | 
- const { data } = await github.repos.get(context.repo) - const reg = new RegExp('📊 ', 'ug'); - data.description = data.description.replace(reg, "") - return data - - name: Prepare - id: prep - env: - GITHUB_SHA: ${{ github.sha }} - run: | - DOCKER_IMAGE=cubejs/cubestore - VERSION=noop - if [ "${{ github.event_name }}" = "schedule" ]; then - VERSION=nightly - elif [[ $GITHUB_REF == refs/tags/* ]]; then - VERSION=${GITHUB_REF#refs/tags/} - fi - - TAGS="${DOCKER_IMAGE}:${VERSION}${{ matrix.postfix }}" - - if [[ $VERSION =~ ^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then - MINOR=${VERSION%.*} - MAJOR=${MINOR%.*} - TAGS="$TAGS,${DOCKER_IMAGE}:${MINOR}${{ matrix.postfix }},${DOCKER_IMAGE}:${MAJOR}${{ matrix.postfix }},${DOCKER_IMAGE}:${{ matrix.tag }}" - fi - - echo ::set-output name=version::${VERSION} - echo ::set-output name=tags::${TAGS} - echo ::set-output name=created::$(date -u +'%Y-%m-%dT%H:%M:%SZ') - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - version: v0.9.1 - - name: Cache Docker layers - uses: actions/cache@v3 - with: - path: /tmp/.buildx-cache - key: ${{ runner.os }}-${{ matrix.target }}-buildx-${{ matrix.tag }}-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-${{ matrix.target }}-buildx-${{ matrix.tag }}- - - name: Push to Docker Hub - uses: docker/build-push-action@v3 - with: - context: ./rust/cubestore/ - file: ./rust/cubestore/Dockerfile - platforms: ${{ matrix.platforms }} - build-args: ${{ matrix.build-args }} - push: true - tags: ${{ steps.prep.outputs.tags }} - cache-from: type=local,src=/tmp/.buildx-cache - cache-to: type=local,dest=/tmp/.buildx-cache - labels: | - org.opencontainers.image.title=${{ fromJson(steps.repo.outputs.result).name }} - org.opencontainers.image.description=${{ fromJson(steps.repo.outputs.result).description }} - org.opencontainers.image.url=${{ fromJson(steps.repo.outputs.result).html_url }} - org.opencontainers.image.source=${{ fromJson(steps.repo.outputs.result).clone_url }} - org.opencontainers.image.version=${{ steps.prep.outputs.version }} - org.opencontainers.image.created=${{ steps.prep.outputs.created }} - org.opencontainers.image.revision=${{ github.sha }} - org.opencontainers.image.licenses=${{ fromJson(steps.repo.outputs.result).license.spdx_id }} - - name: Update repo description - uses: peter-evans/dockerhub-description@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - repository: cubejs/cubestore - readme-filepath: ./rust/cubestore/README.md - - cubestore_linux: - runs-on: ${{ matrix.os }} - timeout-minutes: 90 - env: - RUSTFLAGS: '-Ctarget-feature=+crt-static' - OPENSSL_STATIC: 1 - strategy: - matrix: - target: - - x86_64-unknown-linux-gnu - - x86_64-unknown-linux-musl - - aarch64-unknown-linux-gnu - include: - - target: x86_64-unknown-linux-gnu - os: ubuntu-20.04 - executable_name: cubestored - cross: true - strip: true - compress: false - - target: x86_64-unknown-linux-musl - os: ubuntu-20.04 - executable_name: cubestored - cross: true - strip: true - # cubestored: CantPackException: bad DT_HASH nbucket=0x344 len=0x1890 - compress: false - - target: aarch64-unknown-linux-gnu - os: ubuntu-20.04 - executable_name: cubestored - cross: true - # Unable to recognise the format of the input file `rust/cubestore/target/aarch64-unknown-linux-gnu/release/cubestored' - strip: false - # UPX is broken, 
issue https://github.com/cube-js/cube/issues/4474 - compress: false - fail-fast: false - steps: - - uses: actions/checkout@v4 - - name: Disable rustup update (issue workaround for Windows) - run: rustup set auto-self-update disable - if: contains(runner.os, 'windows') - shell: bash - - name: Setup Rust toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - target: ${{ matrix.target }} - override: true - components: rustfmt - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubestore -> target - prefix-key: v0-rust-cubestore-cross - key: target-${{ matrix.target }} - - run: source .github/actions/${{ matrix.before_script }}.sh - if: ${{ matrix.before_script }} - shell: bash - - name: Build with Cross - if: ${{ matrix.cross }} - run: | - wget -c https://github.com/rust-embedded/cross/releases/download/v0.2.1/cross-v0.2.1-x86_64-unknown-linux-gnu.tar.gz -O - | tar -xz - chmod +x cross && sudo mv cross /usr/local/bin/cross - cd rust/cubestore - cross build --release --target=${{ matrix.target }} - - name: Build with Cargo - if: ${{ !matrix.cross }} - run: | - cd rust/cubestore && cargo build --release --target=${{ matrix.target }} - - name: Compress binaries - uses: svenstaro/upx-action@v2 - if: ${{ matrix.compress }} - with: - file: rust/cubestore/target/${{ matrix.target }}/release/${{ matrix.executable_name }} - args: --lzma - strip: ${{ matrix.strip }} - - name: Create folder for archive - run: | - mkdir cubestore-archive - mkdir cubestore-archive/bin - - name: Create archive for release - run: | - mv rust/cubestore/target/${{ matrix.target }}/release/${{ matrix.executable_name }} cubestore-archive/bin/${{ matrix.executable_name }} - cd cubestore-archive - tar -cvzf cubestored-${{ matrix.target }}.tar.gz * - - name: Upload Binary to Release - uses: svenstaro/upload-release-action@v2 - with: - repo_token: ${{ secrets.GITHUB_TOKEN }} - file: cubestore-archive/cubestored-${{ matrix.target }}.tar.gz - asset_name: cubestored-${{ matrix.target }}.tar.gz - tag: ${{ github.ref }} - overwrite: true - - cubestore: - runs-on: ${{ matrix.os }} - timeout-minutes: 90 - env: - RUSTFLAGS: '-Ctarget-feature=+crt-static' - OPENSSL_STATIC: 1 - strategy: - matrix: - target: - - x86_64-pc-windows-msvc - - x86_64-apple-darwin - include: - - target: x86_64-pc-windows-msvc - os: windows-2019 - executable_name: cubestored.exe - cross: false - strip: true - # cubestored.exe: CantPackException: superfluous data between sections - compress: false - # Please use minimal possible version of macOS, because it produces constraint on libstdc++ - tar_executable: tar - - target: x86_64-apple-darwin - os: macos-11 - executable_name: cubestored - cross: false - strip: false - compress: false - # bsd tar has a different format with Sparse files which breaks download script - tar_executable: gtar - fail-fast: false - steps: - - uses: actions/checkout@v4 - - name: Disable rustup update (issue workaround for Windows) - run: rustup set auto-self-update disable - if: contains(runner.os, 'windows') - shell: bash - - name: Setup Rust toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - target: ${{ matrix.target }} - override: true - components: rustfmt - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubestore -> target - prefix-key: v0-rust-cubestore-cross - key: target-${{ matrix.target }} - - run: source .github/actions/${{ matrix.before_script }}.sh - if: ${{ matrix.before_script }} - shell: bash - - uses: ilammy/msvc-dev-cmd@v1 - if: ${{ 
startsWith(matrix.os, 'windows') }}
-      - name: Install OpenSSL for Windows
-        if: ${{ startsWith(matrix.os, 'windows') }}
-        run: vcpkg integrate install; vcpkg install openssl:x64-windows
-      - name: Install LLVM for Windows
-        if: ${{ startsWith(matrix.os, 'windows') }}
-        run: choco install -y --force llvm --version 9.0.1
-      - name: Set Env Variables for Windows
-        uses: allenevans/set-env@v3.0.0
-        if: ${{ startsWith(matrix.os, 'windows') }}
-        with:
-          OPENSSL_DIR: 'C:/vcpkg/packages/openssl_x64-windows'
-          # These paths are required to work with static linking
-          OPENSSL_LIB_DIR: 'C:/vcpkg/packages/openssl_x64-windows/lib'
-          OPENSSL_INCLUDE_DIR: 'C:/vcpkg/packages/openssl_x64-windows/include'
-          LIBCLANG_PATH: 'C:\Program Files\LLVM\bin'
-      # Hotfix until https://github.com/actions/runner-images/pull/7125 is released/rolled out to the production servers
-      - name: Hotfix for macOS (pkg-config)
-        if: contains(runner.os, 'macos')
-        run: brew install pkg-config
-      - name: Build with Cross
-        if: ${{ matrix.cross }}
-        run: |
-          wget -c https://github.com/rust-embedded/cross/releases/download/v0.2.1/cross-v0.2.1-x86_64-unknown-linux-gnu.tar.gz -O - | tar -xz
-          chmod +x cross && sudo mv cross /usr/local/bin/cross
-          cd rust/cubestore
-          cross build --release --target=${{ matrix.target }}
-      - name: Build with Cargo
-        if: ${{ !matrix.cross }}
-        run: |
-          cd rust/cubestore && cargo build --release --target=${{ matrix.target }}
-      - name: Compress binaries
-        uses: svenstaro/upx-action@v2
-        if: ${{ matrix.compress }}
-        with:
-          file: rust/cubestore/target/${{ matrix.target }}/release/${{ matrix.executable_name }}
-          args: --lzma
-          strip: ${{ matrix.strip }}
-      - name: Create folder for archive
-        run: |
-          mkdir cubestore-archive
-          mkdir cubestore-archive/bin
-      - name: Copy/paste OpenSSL to Archive (hotfix for Windows)
-        if: ${{ startsWith(matrix.os, 'windows') }}
-        run: cp C:/vcpkg/packages/openssl_x64-windows/bin/*.dll cubestore-archive/bin
-      - name: Create archive for release
-        run: |
-          mv rust/cubestore/target/${{ matrix.target }}/release/${{ matrix.executable_name }} cubestore-archive/bin/${{ matrix.executable_name }}
-          cd cubestore-archive
-          ${{ matrix.tar_executable }} -cvzf cubestored-${{ matrix.target }}.tar.gz *
-      - name: Upload Binary to Release
-        uses: svenstaro/upload-release-action@v2
-        with:
-          repo_token: ${{ secrets.GITHUB_TOKEN }}
-          file: cubestore-archive/cubestored-${{ matrix.target }}.tar.gz
-          asset_name: cubestored-${{ matrix.target }}.tar.gz
-          tag: ${{ github.ref }}
-          overwrite: true
diff --git a/.github/workflows/push-cross-images.yml b/.github/workflows/push-cross-images.yml
deleted file mode 100644
index 756160ae4ac4e..0000000000000
--- a/.github/workflows/push-cross-images.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: Cross Images
-
-on:
-  push:
-    paths:
-      - '.github/workflows/push-cross-images.yml'
-      - 'rust/cubestore/cross/**'
-    branches:
-      - 'master'
-  pull_request:
-    paths:
-      - '.github/workflows/push-cross-images.yml'
-      - 'rust/cubestore/cross/**'
-
-jobs:
-  docker-dev:
-    name: Build cross image for ${{ matrix.target }} target
-    runs-on: ubuntu-20.04
-    timeout-minutes: 120
-    strategy:
-      matrix:
-        target:
-          - x86_64-unknown-linux-gnu
-          - x86_64-unknown-linux-musl
-          - aarch64-unknown-linux-gnu
-      fail-fast: false
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Login to DockerHub
-        if: ${{ github.ref == 'refs/heads/master' }}
-        uses: docker/login-action@v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-        with:
-          version: v0.9.1
-          driver-opts: network=host
-      - name: Load .cross file
-        uses: xom9ikk/dotenv@v2
-        with:
-          path: rust/cubestore/cross/
-      - name: Push to Docker Hub
-        uses: docker/build-push-action@v3
-        with:
-          context: ./
-          file: ./rust/cubestore/cross/${{ matrix.target }}.Dockerfile
-          platforms: linux/amd64
-          push: ${{ github.ref == 'refs/heads/master' }}
-          tags: cubejs/rust-cross:${{ matrix.target }},cubejs/rust-cross:${{ matrix.target }}-${{ env.CROSS_VERSION }}
diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
deleted file mode 100644
index 4f89228285033..0000000000000
--- a/.github/workflows/push.yml
+++ /dev/null
@@ -1,643 +0,0 @@
-name: Build
-
-on:
-  push:
-    paths:
-      - '.github/actions/smoke.sh'
-      - '.github/workflows/push.yml'
-      - '.github/workflows/master.yml'
-      - 'packages/**'
-      - 'rust/cubestore/js-wrapper/**'
-      - 'rust/cubestore/tsconfig.json'
-      - 'rust/cubestore/package.json'
-      - 'rust/cubesql/**'
-      - '.eslintrc.js'
-      - '.prettierrc'
-      - 'package.json'
-      - 'lerna.json'
-      - 'rollup.config.js'
-      - 'yarn.lock'
-    branches:
-      - 'master'
-  pull_request:
-    paths:
-      - '.github/workflows/push.yml'
-      - '.github/workflows/master.yml'
-      - 'packages/**'
-      - 'rust/cubestore/js-wrapper/**'
-      - 'rust/cubestore/tsconfig.json'
-      - 'rust/cubestore/package.json'
-      - 'rust/cubesql/**'
-      - '.eslintrc.js'
-      - '.prettierrc'
-      - 'package.json'
-      - 'lerna.json'
-      - 'rollup.config.js'
-      - 'yarn.lock'
-
-jobs:
-  unit:
-    runs-on: ubuntu-20.04
-    timeout-minutes: 60
-    needs: latest-tag-sha
-    if: (needs['latest-tag-sha'].outputs.sha != github.sha)
-
-    strategy:
-      matrix:
-        node-version: [16.x, 18.x]
-      fail-fast: false
-
-    steps:
-      - id: get-tag-out
-        run: echo "$OUT"
-        env:
-          OUT: ${{ needs['latest-tag-sha'].outputs.sha }}
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          # pulls all commits (needed for codecov)
-          fetch-depth: 2
-      - name: Install Rust
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: nightly-2023-12-13
-          override:
true - components: rustfmt - - name: Install Node.js 16.x - uses: actions/setup-node@v3 - with: - node-version: 16.x - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: NPM lint - run: yarn lint:npm - - name: Lerna lint - run: yarn lerna run --concurrency 1 lint - - build: - runs-on: ubuntu-20.04 - timeout-minutes: 60 - needs: latest-tag-sha - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - - name: Install Node.js 16.x - uses: actions/setup-node@v3 - with: - node-version: 16.x - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Check Yarn lock wasn't modified - run: if [ "$(git status | grep nothing)x" = "x" ]; then echo "Non empty changeset after lerna bootstrap"; git status; exit 1; else echo "Nothing to commit. 
Proceeding"; fi; - - name: Build Core Client libraries - run: yarn build - - name: Build other packages - run: yarn lerna run --concurrency 1 build - env: - NODE_OPTIONS: --max_old_space_size=4096 - - build-cubestore: - needs: [latest-tag-sha] - runs-on: ubuntu-20.04 - timeout-minutes: 60 - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - steps: - - name: Maximize build space (disk space limitations) - run: | - echo "Before" - df -h - sudo apt-get remove -y 'php.*' - sudo apt-get remove -y '^mongodb-.*' - sudo apt-get remove -y '^mysql-.*' - sudo apt-get autoremove -y - sudo apt-get clean - - sudo rm -rf /usr/share/dotnet - sudo rm -rf /usr/local/lib/android - sudo rm -rf /opt/ghc - sudo rm -rf /opt/hostedtoolcache/CodeQL - echo "After" - df -h - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - override: true - components: rustfmt - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubestore -> target - # Separate path for release key to protect cache bloating - shared-key: cubestore-release - key: ubuntu-20.04 - - name: Build cubestore - uses: actions-rs/cargo@v1 - with: - command: build - args: --manifest-path rust/cubestore/Cargo.toml -j 4 --release - - name: 'Upload cubestored-x86_64-unknown-linux-gnu-release artifact' - uses: actions/upload-artifact@v3 - with: - name: cubestored-x86_64-unknown-linux-gnu-release - path: ./rust/cubestore/target/release/cubestored - retention-days: 5 - - integration-cubestore: - needs: [latest-tag-sha, build-cubestore] - runs-on: ubuntu-20.04 - timeout-minutes: 60 - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - - strategy: - matrix: - node-version: [16.x] - fail-fast: false - - steps: - - name: Maximize build space (disk space limitations) - run: | - echo "Before" - df -h - sudo apt-get remove -y 'php.*' - sudo apt-get remove -y '^mongodb-.*' - sudo apt-get remove -y '^mysql-.*' - sudo apt-get autoremove -y - sudo apt-get clean - - sudo rm -rf /usr/share/dotnet - sudo rm -rf /usr/local/lib/android - sudo rm -rf /opt/ghc - sudo rm -rf /opt/hostedtoolcache/CodeQL - echo "After" - df -h - - name: Checkout - uses: actions/checkout@v4 - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Lerna tsc - run: yarn tsc - - name: Download cubestored-x86_64-unknown-linux-gnu-release artifact - uses: actions/download-artifact@v3 - with: - path: ./rust/cubestore/target/release/ - name: cubestored-x86_64-unknown-linux-gnu-release - - name: Run Cube Store in background - run: | - chmod +x ./rust/cubestore/target/release/cubestored - RUNNER_TRACKING_ID="" && ./rust/cubestore/target/release/cubestored & - - name: Run Cubestore Integration - timeout-minutes: 10 - run: | - yarn lerna run --concurrency 1 --stream --no-prefix 
integration:cubestore - - integration: - needs: [unit, lint, latest-tag-sha] - runs-on: ubuntu-20.04 - timeout-minutes: 60 - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - - strategy: - matrix: - node-version: [16.x] - db: [ - 'clickhouse', 'druid', 'elasticsearch', 'mssql', 'mysql', 'postgres', 'prestodb', - 'mysql-aurora-serverless', 'crate', 'mongobi' - ] - fail-fast: false - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Lerna tsc - run: yarn tsc - - name: Run Integration tests for ${{ matrix.db }} matrix - timeout-minutes: 30 - run: ./.github/actions/integration/${{ matrix.db }}.sh - - integration-smoke: - needs: [ latest-tag-sha, build-cubestore ] - runs-on: ubuntu-20.04 - timeout-minutes: 90 - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - - strategy: - matrix: - node-version: [ 16.x ] - fail-fast: false - - steps: - - name: Maximize build space (disk space limitations) - run: | - echo "Before" - df -h - sudo apt-get remove -y 'php.*' - sudo apt-get remove -y '^mongodb-.*' - sudo apt-get remove -y '^mysql-.*' - sudo apt-get autoremove -y - sudo apt-get clean - - sudo rm -rf /usr/share/dotnet - sudo rm -rf /usr/local/lib/android - sudo rm -rf /opt/ghc - sudo rm -rf /opt/hostedtoolcache/CodeQL - echo "After" - df -h - - name: Checkout - uses: actions/checkout@v4 - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Install instant client for Oracle - uses: GoodManWEN/oracle-client-action@main - - name: Build client - run: yarn build - - name: Lerna tsc - run: yarn tsc - - name: Download cubestored-x86_64-unknown-linux-gnu-release artifact - uses: actions/download-artifact@v3 - with: - path: rust/cubestore/downloaded/latest/bin/ - name: cubestored-x86_64-unknown-linux-gnu-release - - name: Chmod +x for cubestored - run: | - chmod +x ./rust/cubestore/downloaded/latest/bin/cubestored - - name: Run Integration smoke 
tests - timeout-minutes: 30 - run: ./.github/actions/smoke.sh - - docker-image-latest-set-tag: - # At least git should be completed pushed up until this moment - needs: [lint, latest-tag-sha] - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - runs-on: ubuntu-20.04 - outputs: - tag: ${{ steps.get-tag.outputs.tag }} - steps: - - name: Checkout - uses: actions/checkout@v4 - - id: get-tag - run: echo "::set-output name=tag::$(git tag --contains $GITHUB_SHA)" - env: - GITHUB_SHA: ${{ github.sha }} - - latest-tag-sha: - runs-on: ubuntu-20.04 - outputs: - sha: ${{ steps.get-tag.outputs.sha }} - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - id: git-log - run: git log HEAD~30..HEAD - - id: get-tag-test - run: echo "$SHA $(git rev-list -n 1 $(git tag --contains $SHA))" - env: - SHA: ${{ github.sha }} - - id: get-tag - run: echo "::set-output name=sha::$(git rev-list -n 1 $(git tag --contains $SHA))" - env: - SHA: ${{ github.sha }} - - id: get-tag-out - run: echo "$OUT" - env: - OUT: ${{ steps.get-tag.outputs.sha }} - - docker-dev: - needs: [latest-tag-sha] - if: (needs['latest-tag-sha'].outputs.sha != github.sha) - name: Build & Test :dev for ${{ matrix.name }} without pushing - runs-on: ubuntu-22.04 - timeout-minutes: 60 - services: - registry: - image: registry:2 - ports: - - 5000:5000 - strategy: - matrix: - dockerfile: - - dev.Dockerfile - include: - - dockerfile: dev.Dockerfile - name: Debian - tag: tmp-dev - fail-fast: false - steps: - - name: Maximize build space (disk space limitations) - run: | - echo "Before" - df -h - sudo apt-get remove -y 'php.*' - sudo apt-get remove -y '^mongodb-.*' - sudo apt-get remove -y '^mysql-.*' - sudo apt-get autoremove -y - sudo apt-get clean - - sudo rm -rf /usr/share/dotnet - sudo rm -rf /usr/local/lib/android - sudo rm -rf /opt/ghc - sudo rm -rf /opt/hostedtoolcache/CodeQL - echo "After" - df -h - - name: Checkout - uses: actions/checkout@v4 - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - version: v0.9.1 - driver-opts: network=host - - name: Build image - uses: docker/build-push-action@v3 - timeout-minutes: 30 - with: - context: . 
- file: ./packages/cubejs-docker/${{ matrix.dockerfile }} - platforms: linux/amd64 - push: true - tags: localhost:5000/cubejs/cube:${{ matrix.tag }} - - name: Use Node.js 16.x - uses: actions/setup-node@v3 - with: - node-version: 16.x - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Build client - run: yarn build - - name: Lerna tsc - run: yarn tsc - - name: Testing CubeJS (container mode) via BirdBox - run: | - cd packages/cubejs-testing/ - export BIRDBOX_CUBEJS_VERSION=${{ matrix.tag }} - export BIRDBOX_CUBEJS_REGISTRY_PATH=localhost:5000/ - export DEBUG=testcontainers - yarn run dataset:minimal - yarn run birdbox:postgresql - yarn run birdbox:postgresql-pre-aggregations - # - name: Testing Athena driver (container mode) via BirdBox - # env: - # CUBEJS_AWS_KEY: ${{ secrets.CUBEJS_AWS_KEY }} - # CUBEJS_AWS_SECRET: ${{ secrets.CUBEJS_AWS_SECRET }} - # CUBEJS_AWS_REGION: us-east-1 - # CUBEJS_AWS_S3_OUTPUT_LOCATION: s3://cubejs-opensource/testing/output - # CUBEJS_DB_EXPORT_BUCKET: s3://cubejs-opensource/testing/export - # run: | - # cd packages/cubejs-testing/ - # export BIRDBOX_CUBEJS_VERSION=${{ matrix.tag }} - # export BIRDBOX_CUBEJS_REGISTRY_PATH=localhost:5000/ - # export DEBUG=testcontainers - # yarn run driver:athena --log=ignore --mode=docker - # - name: Testing BigQuery driver (container mode) via BirdBox - # env: - # CUBEJS_DB_BQ_CREDENTIALS: ${{ secrets.CUBEJS_DB_BQ_CREDENTIALS }} - # CUBEJS_DB_BQ_PROJECT_ID: cube-open-source - # CUBEJS_DB_EXPORT_BUCKET: cube-open-source-export-bucket - # run: | - # cd packages/cubejs-testing/ - # export BIRDBOX_CUBEJS_VERSION=${{ matrix.tag }} - # export BIRDBOX_CUBEJS_REGISTRY_PATH=localhost:5000/ - # export DEBUG=testcontainers - # yarn run driver:bigquery --log=ignore --mode=docker - - name: Testing PostgreSQL driver (container mode) via BirdBox - env: - CUBEJS_DB_TYPE: postgres - CUBEJS_DB_USER: postgres - CUBEJS_DB_PASS: postgres - run: | - cd packages/cubejs-testing/ - export BIRDBOX_CUBEJS_VERSION=${{ matrix.tag }} - export BIRDBOX_CUBEJS_REGISTRY_PATH=localhost:5000/ - export DEBUG=testcontainers - yarn run driver:postgres --log=ignore --mode=docker - - name: Testing Docker image via Cypress (Chrome) - env: - CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} - BIRDBOX_CYPRESS_UPDATE_SCREENSHOTS: ${{ contains(github.event.head_commit.message, '[update screenshots]') }} - run: | - cd packages/cubejs-testing/ - export BIRDBOX_CUBEJS_VERSION=${{ matrix.tag }} - export BIRDBOX_CUBEJS_REGISTRY_PATH=localhost:5000/ - export BIRDBOX_CYPRESS_BROWSER=chrome - export BIRDBOX_CYPRESS_TARGET=postgresql - export DEBUG=testcontainers - yarn run cypress:install - yarn run cypress:birdbox - - name: Upload screenshots on failure - uses: actions/upload-artifact@v2 - if: failure() - with: - name: cypress-screenshots-docker-dev-${{ matrix.name }} - path: packages/cubejs-testing/cypress/screenshots diff --git a/.github/workflows/rust-cubesql.yml 
b/.github/workflows/rust-cubesql.yml deleted file mode 100644 index a48266d619d1e..0000000000000 --- a/.github/workflows/rust-cubesql.yml +++ /dev/null @@ -1,393 +0,0 @@ -name: Build native - -on: - push: - paths: - - '.github/workflows/rust-cubesql.yml' - - 'packages/cubejs-backend-native/**' - - 'rust/cubesql/**' - - 'rust/cubesql/**' - branches: - - 'master' - pull_request: - paths: - - '.github/workflows/rust-cubesql.yml' - - 'packages/cubejs-backend-native/**' - - 'rust/cubesql/**' - - 'rust/cubesql/**' - -jobs: - lint: - runs-on: ubuntu-20.04 - timeout-minutes: 20 - name: Check fmt/clippy - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt, clippy - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubesql -> target - # default key - key: cubesql-${{ runner.OS }}-x86_64-unknown-linux-gnu-16 - shared-key: cubesql-${{ runner.OS }}-x86_64-unknown-linux-gnu-16 - - name: Lint CubeSQL - run: cd rust/cubesql/cubesql && cargo fmt --all -- --check - - name: Lint Native - run: cd packages/cubejs-backend-native && cargo fmt --all -- --check - - name: Clippy Native - run: cd packages/cubejs-backend-native && cargo clippy -- -D warnings - # CubeSQL is not ready for Clippy - #- name: Clippy CubeSQL - # run: cd rust/cubesql && cargo clippy -- -D warnings - - unit: - runs-on: ubuntu-20.04 - timeout-minutes: 60 - name: Unit (Rewrite Engine) - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # pulls all commits (needed for codecov) - fetch-depth: 2 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - override: true - components: rustfmt - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubesql -> target - # default key - key: cubesql-${{ runner.OS }}-x86_64-unknown-linux-gnu-16 - shared-key: cubesql-${{ runner.OS }}-x86_64-unknown-linux-gnu-16 - - name: Install tarpaulin@0.20.1 - uses: baptiste0928/cargo-install@v2 - with: - crate: cargo-tarpaulin - version: "0.20.1" - - name: Unit tests (Rewrite Engine) - env: - CUBESQL_TESTING_CUBE_TOKEN: ${{ secrets.CUBESQL_TESTING_CUBE_TOKEN }} - CUBESQL_TESTING_CUBE_URL: ${{ secrets.CUBESQL_TESTING_CUBE_URL }} - CUBESQL_REWRITE_ENGINE: true - CUBESQL_SQL_PUSH_DOWN: true - CUBESQL_REWRITE_CACHE: true - CUBESQL_REWRITE_TIMEOUT: 60 - run: cd rust/cubesql && cargo tarpaulin --workspace --no-fail-fast --avoid-cfg-tarpaulin --out Xml - - name: Upload code coverage - uses: codecov/codecov-action@v3 - with: - files: ./rust/cubesql/cobertura.xml - verbose: true - flags: cubesql - fail_ci_if_error: false - - unit_legacy: - runs-on: ubuntu-20.04 - timeout-minutes: 60 - name: Unit (Legacy) - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - # pulls all commits (needed for codecov) - fetch-depth: 2 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubesql -> target - # default key - key: cubesql-${{ runner.OS }}-x86_64-unknown-linux-gnu-16 - shared-key: cubesql-${{ runner.OS }}-x86_64-unknown-linux-gnu-16 - - name: Unit tests (Legacy Engine) - env: - CUBESQL_TESTING_CUBE_TOKEN: ${{ secrets.CUBESQL_TESTING_CUBE_TOKEN }} - CUBESQL_TESTING_CUBE_URL: ${{ secrets.CUBESQL_TESTING_CUBE_URL }} - CUBESQL_SQL_PUSH_DOWN: true - CUBESQL_REWRITE_CACHE: true - run: cd rust/cubesql && cargo test - - native_linux: 
- needs: [lint] - runs-on: ubuntu-20.04 - timeout-minutes: 60 - name: Build Linux GNU ${{ matrix.node-version }}.x ${{ matrix.target }} with Python ${{ matrix.python-version }} - strategy: - matrix: - # Current used version + 1 LTS - node-version: [16, 18] - python-version: ["3.9", "3.10", "3.11", "3.12", "fallback"] - target: ["x86_64-unknown-linux-gnu", "aarch64-unknown-linux-gnu"] - # minimize number of jobs - exclude: - - node-version: 18 - target: "aarch64-unknown-linux-gnu" - - python-version: 3.10 - target: "aarch64-unknown-linux-gnu" - - python-version: 3.11 - target: "aarch64-unknown-linux-gnu" - fail-fast: false - container: - image: cubejs/rust-cross:${{ matrix.target }}-30052023 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - target: ${{ matrix.target }} - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubesql -> target - key: cubesql-${{ runner.OS }}-${{ matrix.target }}-${{ matrix.node-version }} - shared-key: cubesql-${{ runner.OS }}-${{ matrix.target }}-${{ matrix.node-version }} - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Install Yarn - run: npm install -g yarn - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Lerna tsc - run: yarn tsc - - name: Build native (fallback) - if: (matrix.python-version == 'fallback') - env: - CARGO_BUILD_TARGET: ${{ matrix.target }} - working-directory: ./packages/cubejs-backend-native - run: yarn run native:build-debug - - name: Setup cross compilation - if: (matrix.target == 'aarch64-unknown-linux-gnu') - uses: allenevans/set-env@v3.0.0 - with: - PYO3_CROSS_PYTHON_VERSION: ${{ matrix.python-version }} - - name: Build native (with Python) - if: (matrix.python-version != 'fallback') - env: - PYO3_PYTHON: python${{ matrix.python-version }} - CARGO_BUILD_TARGET: ${{ matrix.target }} - working-directory: ./packages/cubejs-backend-native - run: yarn run native:build-debug-python - - name: Test native (GNU only) - if: (matrix.target == 'x86_64-unknown-linux-gnu') - env: - CUBESQL_STREAM_MODE: true - CUBEJS_NATIVE_INTERNAL_DEBUG: true - working-directory: ./packages/cubejs-backend-native - run: yarn run test:unit - - name: Run E2E Smoke testing over whole Cube (GNU only) - if: (matrix.target == 'x86_64-unknown-linux-gnu') - env: - CUBEJS_NATIVE_INTERNAL_DEBUG: true - working-directory: ./packages/cubejs-testing - run: yarn smoke:cubesql - - native_macos: - needs: [lint] - runs-on: ${{ matrix.os-version }} - timeout-minutes: 60 - name: Build ${{ matrix.os-version }} ${{ matrix.target }} ${{ matrix.node-version }} with Python ${{ matrix.python-version }} - - strategy: - matrix: - # We do not need to test under all versions, we do it under linux - node-version: [16.x] - os-version: ["macos-11"] - target: 
["x86_64-apple-darwin", "aarch64-apple-darwin"] - include: - - target: x86_64-apple-darwin - python-version: "3.9" - - target: x86_64-apple-darwin - python-version: "3.10" - - target: x86_64-apple-darwin - python-version: "3.11" - - target: x86_64-apple-darwin - python-version: "3.12" - - target: x86_64-apple-darwin - python-version: "fallback" - - target: aarch64-apple-darwin - python-version: "fallback" - fail-fast: false - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - target: ${{ matrix.target }} - - name: Install Python - uses: actions/setup-python@v4 - if: (matrix.python-version != 'fallback') - with: - python-version: ${{ matrix.python-version }} - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Lerna tsc - run: yarn tsc - - name: Build native (fallback) - if: (matrix.python-version == 'fallback') - env: - CUBESQL_STREAM_MODE: true - CUBEJS_NATIVE_INTERNAL_DEBUG: true - CARGO_BUILD_TARGET: ${{ matrix.target }} - run: cd packages/cubejs-backend-native && yarn run native:build - - name: Build native (with Python) - if: (matrix.python-version != 'fallback') - env: - PYO3_PYTHON: python${{ matrix.python-version }} - CARGO_BUILD_TARGET: ${{ matrix.target }} - run: cd packages/cubejs-backend-native && yarn run native:build-debug-python - - name: Tests - # We cannot test arm64 on x64 - if: (matrix.target == 'x86_64-apple-darwin') - env: - CUBESQL_STREAM_MODE: true - CUBEJS_NATIVE_INTERNAL_DEBUG: true - run: cd packages/cubejs-backend-native && yarn run test:unit - - native_windows: - needs: [lint] - runs-on: ${{ matrix.os-version }} - timeout-minutes: 60 - name: Build ${{ matrix.os-version }} ${{ matrix.node-version }} with Python ${{ matrix.python-version }} - - strategy: - matrix: - # We do not need to test under all versions, we do it under linux - node-version: [16.x] - os-version: [windows-2019] - python-version: ["fallback"] - fail-fast: false - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Disable rustup update (issue workaround for Windows) - run: rustup set auto-self-update disable - shell: bash - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2023-12-13 - override: true - components: rustfmt - - name: Install Python - uses: actions/setup-python@v4 - if: (matrix.python-version != 'fallback') - with: - python-version: ${{ matrix.python-version }} - - name: Install Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.node-version }} - - name: Set Yarn version - run: yarn policies set-version v1.22.19 - - name: Get yarn cache directory path - id: yarn-cache-dir-path - run: echo "dir=$(yarn cache dir)" >> 
$GITHUB_OUTPUT - shell: bash - - name: Restore yarn cache - uses: actions/cache@v3 - with: - path: ${{ steps.yarn-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} - restore-keys: | - ${{ runner.os }}-yarn- - - name: Yarn install - uses: nick-invision/retry@v2 - env: - CUBESTORE_SKIP_POST_INSTALL: true - with: - max_attempts: 3 - retry_on: error - retry_wait_seconds: 15 - timeout_minutes: 20 - command: yarn install --frozen-lockfile - - name: Lerna tsc - run: yarn tsc - - name: Build native (fallback) - if: (matrix.python-version == 'fallback') - env: - CUBESQL_STREAM_MODE: true - CUBEJS_NATIVE_INTERNAL_DEBUG: true - run: cd packages/cubejs-backend-native && yarn run native:build - - name: Build native (with Python) - if: (matrix.python-version != 'fallback') - env: - PYO3_PYTHON: python${{ matrix.python-version }} - run: cd packages/cubejs-backend-native && yarn run native:build-debug-python - - name: Tests - env: - CUBESQL_STREAM_MODE: true - CUBEJS_NATIVE_INTERNAL_DEBUG: true - run: cd packages/cubejs-backend-native && yarn run test:unit diff --git a/.github/workflows/rust-cubestore-master.yml b/.github/workflows/rust-cubestore-master.yml deleted file mode 100644 index cb9166e0418f1..0000000000000 --- a/.github/workflows/rust-cubestore-master.yml +++ /dev/null @@ -1,286 +0,0 @@ -name: Rust Master -on: - push: - paths: - - '.github/workflows/rust-cubestore-master.yml' - - 'rust/cubestore/**' - branches: - - master - -jobs: - debian: - name: Debian Rust ${{ matrix.rust }} - # 22.04 has gcc 11, new binutils (ld) - runs-on: ubuntu-22.04 - timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - rust: [nightly-2022-06-22] - env: - RUST: ${{ matrix.rust }} - steps: - - name: Maximize build space (disk space limitations) - run: | - echo "Before" - df -h - sudo apt-get remove -y 'php.*' - sudo apt-get remove -y '^mongodb-.*' - sudo apt-get remove -y '^mysql-.*' - sudo apt-get autoremove -y - sudo apt-get clean - - sudo rm -rf /usr/share/dotnet - sudo rm -rf /usr/local/lib/android - sudo rm -rf /opt/ghc - sudo rm -rf /opt/hostedtoolcache/CodeQL - echo "After" - df -h - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: ${{ matrix.rust }} - override: true - components: rustfmt - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubestore -> target - shared-key: cubestore - key: ubuntu-22.04 - - name: Run cargo fmt cubestore - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --manifest-path rust/cubestore/cubestore/Cargo.toml -- --check - - name: Run cargo fmt cubehll - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --manifest-path rust/cubestore/cubehll/Cargo.toml -- --check - - name: Run cargo build - uses: actions-rs/cargo@v1 - with: - command: build - args: --manifest-path rust/cubestore/Cargo.toml -j 4 - - name: Run cargo test - uses: actions-rs/cargo@v1 - env: - CUBESTORE_AWS_ACCESS_KEY_ID: ${{ secrets.CUBESTORE_AWS_ACCESS_KEY_ID }} - CUBESTORE_AWS_SECRET_ACCESS_KEY: ${{ secrets.CUBESTORE_AWS_SECRET_ACCESS_KEY }} - SERVICE_ACCOUNT_JSON: ${{ secrets.SERVICE_ACCOUNT_JSON }} - TEST_KSQL_USER: ${{ secrets.TEST_KSQL_USER }} - TEST_KSQL_PASS: ${{ secrets.TEST_KSQL_PASS }} - TEST_KSQL_URL: ${{ secrets.TEST_KSQL_URL }} - with: - command: test - args: --manifest-path rust/cubestore/Cargo.toml -j 1 - - cubestore-docker-image-dev: - name: Release Cube Store :dev image - runs-on: ${{ matrix.os }} - strategy: - matrix: - target: - - x86_64-unknown-linux-gnu - - 
aarch64-unknown-linux-gnu - include: - - os: ubuntu-20.04 - target: x86_64-unknown-linux-gnu - platforms: linux/amd64 - build-args: WITH_AVX2=1 - postfix: "" - - os: self-hosted - target: aarch64-unknown-linux-gnu - platforms: linux/arm64 - build-args: WITH_AVX2=0 - postfix: "-arm64v8" - timeout-minutes: 60 - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - name: Repo metadata - id: repo - uses: actions/github-script@v3 - with: - script: | - const repo = await github.repos.get(context.repo) - return repo.data - - name: Prepare - id: prep - env: - GITHUB_SHA: ${{ github.sha }} - run: | - DOCKER_IMAGE=cubejs/cubestore - VERSION=dev${{ matrix.postfix }} - - if [ "${{ github.event_name }}" = "schedule" ]; then - VERSION=nightly - elif [[ $GITHUB_REF == refs/tags/* ]]; then - VERSION=${GITHUB_REF#refs/tags/} - elif [[ $GITHUB_REF == refs/heads/* ]]; then - VERSION=$(echo ${GITHUB_REF#refs/heads/} | sed -r 's#/+#-#g') - if [ "${{ github.event.repository.default_branch }}" = "$VERSION" ]; then - VERSION=edge - fi - elif [[ $GITHUB_REF == refs/pull/* ]]; then - VERSION=pr-${{ github.event.number }} - fi - - TAGS="${DOCKER_IMAGE}:${VERSION}" - - if [[ $VERSION =~ ^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then - MINOR=${VERSION%.*} - MAJOR=${MINOR%.*} - TAGS="$TAGS,${DOCKER_IMAGE}:${MINOR},${DOCKER_IMAGE}:${MAJOR}" - elif [ "${{ github.event_name }}" = "push" ]; then - TAGS="$TAGS,${DOCKER_IMAGE}:build-1${GITHUB_RUN_NUMBER}${{ matrix.postfix }}" - fi - - echo ::set-output name=version::${VERSION} - echo ::set-output name=tags::${TAGS} - echo ::set-output name=created::$(date -u +'%Y-%m-%dT%H:%M:%SZ') - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - version: v0.9.1 - - name: Cache Docker layers - uses: actions/cache@v3 - with: - path: /tmp/.buildx-cache - key: ${{ runner.os }}-${{ matrix.target }}-buildx-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-${{ matrix.target }}-buildx- - - name: Push to Docker Hub - uses: docker/build-push-action@v3 - with: - context: ./rust/cubestore - file: ./rust/cubestore/Dockerfile - platforms: ${{ matrix.platforms }} - build-args: ${{ matrix.build-args }} - push: true - tags: ${{ steps.prep.outputs.tags }} - cache-from: type=local,src=/tmp/.buildx-cache - cache-to: type=local,dest=/tmp/.buildx-cache - labels: | - org.opencontainers.image.title=${{ fromJson(steps.repo.outputs.result).name }} - org.opencontainers.image.description=${{ fromJson(steps.repo.outputs.result).description }} - org.opencontainers.image.url=${{ fromJson(steps.repo.outputs.result).html_url }} - org.opencontainers.image.source=${{ fromJson(steps.repo.outputs.result).clone_url }} - org.opencontainers.image.version=${{ steps.prep.outputs.version }} - org.opencontainers.image.created=${{ steps.prep.outputs.created }} - org.opencontainers.image.revision=${{ github.sha }} - org.opencontainers.image.licenses=${{ fromJson(steps.repo.outputs.result).license.spdx_id }} - - name: Update repo description - uses: peter-evans/dockerhub-description@v2 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - repository: cubejs/cubestore - readme-filepath: ./rust/cubestore/README.md - - cross: - runs-on: ${{ matrix.os }} - timeout-minutes: 90 - env: - RUSTFLAGS: '-Ctarget-feature=+crt-static' - OPENSSL_STATIC: 1 - strategy: - matrix: - target: - - 
x86_64-unknown-linux-gnu - - x86_64-unknown-linux-musl - - x86_64-apple-darwin - - aarch64-unknown-linux-gnu - include: - - os: ubuntu-20.04 - target: x86_64-unknown-linux-gnu - executable_name: cubestored - cross: true - strip: true - compress: false - - os: ubuntu-20.04 - target: x86_64-unknown-linux-musl - executable_name: cubestored - cross: true - strip: true - # cubestored: CantPackException: bad DT_HASH nbucket=0x344 len=0x1890 - compress: false - # Please use minimal possible version of macOS, because it produces constraint on libstdc++ - - os: macos-11 - target: x86_64-apple-darwin - executable_name: cubestored - cross: false - strip: true - compress: true - - os: ubuntu-20.04 - target: aarch64-unknown-linux-gnu - executable_name: cubestored - cross: true - # Unable to recognise the format of the input file `rust/cubestore/target/aarch64-unknown-linux-gnu/release/cubestored' - strip: false - # UPX is broken, issue https://github.com/cube-js/cube/issues/4474 - compress: false - fail-fast: false - steps: - - uses: actions/checkout@v4 - - name: Disable rustup update (issue workaround for Windows) - run: rustup set auto-self-update disable - if: contains(runner.os, 'windows') - shell: bash - - name: Setup Rust toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - target: ${{ matrix.target }} - override: true - components: rustfmt - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubestore -> target - prefix-key: v0-rust-cubestore-cross - key: target-${{ matrix.target }} - - run: source .github/actions/${{ matrix.before_script }}.sh - if: ${{ matrix.before_script }} - shell: bash - #- name: Install dependencies Windows - # run: vcpkg integrate install; vcpkg install openssl:x64-windows - # if: matrix.os == 'windows-2019' - # env: - # VCPKG_ROOT: 'C:\vcpkg' - - name: Set Env Variables for Darwin - uses: allenevans/set-env@v3.0.0 - if: ${{ matrix.target == 'x86_64-apple-darwin' }} - with: - OPENSSL_STATIC: "true" - - name: Build with Cross - if: ${{ matrix.cross }} - run: | - wget -c https://github.com/rust-embedded/cross/releases/download/v0.2.1/cross-v0.2.1-x86_64-unknown-linux-gnu.tar.gz -O - | tar -xz - chmod +x cross && sudo mv cross /usr/local/bin/cross - cd rust/cubestore - cross build --release --target=${{ matrix.target }} - - name: Build with Cargo - if: ${{ !matrix.cross }} - run: | - cd rust/cubestore && cargo build --release --target=${{ matrix.target }} - - name: Compress binaries - uses: svenstaro/upx-action@v2 - if: ${{ matrix.compress }} - with: - file: rust/cubestore/target/${{ matrix.target }}/release/${{ matrix.executable_name }} - args: --lzma - strip: ${{ matrix.strip }} - - name: Create archive for release - run: | - mkdir cubestore-archive - mkdir cubestore-archive/bin - mv rust/cubestore/target/${{ matrix.target }}/release/${{ matrix.executable_name }} cubestore-archive/bin/${{ matrix.executable_name }} - cd cubestore-archive - tar cv * | gzip --best > cubestored-${{ matrix.target }}.tar.gz diff --git a/.github/workflows/rust-cubestore.yml b/.github/workflows/rust-cubestore.yml deleted file mode 100644 index 7b7a9d942669c..0000000000000 --- a/.github/workflows/rust-cubestore.yml +++ /dev/null @@ -1,248 +0,0 @@ -name: Rust - -on: - push: - paths: - - '.github/workflows/rust-cubestore.yml' - - 'rust/cubestore/**' - branches-ignore: - - master - pull_request: - paths: - - '.github/workflows/rust-cubestore.yml' - - 'rust/cubestore/**' - -jobs: - debian: - name: Debian Rust ${{ matrix.rust }} - # 22.04 has gcc 11, new binutils 
(ld) - runs-on: ubuntu-22.04 - timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - rust: [nightly-2022-06-22] - env: - RUST: ${{ matrix.rust }} - steps: - - name: Maximize build space (disk space limitations) - run: | - echo "Before" - df -h - sudo apt-get remove -y 'php.*' - sudo apt-get remove -y '^mongodb-.*' - sudo apt-get remove -y '^mysql-.*' - sudo apt-get autoremove -y - sudo apt-get clean - - sudo rm -rf /usr/share/dotnet - sudo rm -rf /usr/local/lib/android - sudo rm -rf /opt/ghc - sudo rm -rf /opt/hostedtoolcache/CodeQL - echo "After" - df -h - - name: Checkout - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: ${{ matrix.rust }} - override: true - components: rustfmt - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubestore -> target - # We should use a separate key for testing to pass disk space limitations - shared-key: cubestore-testing - key: ubuntu-22.04 - - name: Run cargo fmt cubestore - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --manifest-path rust/cubestore/cubestore/Cargo.toml -- --check - - name: Run cargo fmt cubehll - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --manifest-path rust/cubestore/cubehll/Cargo.toml -- --check - - name: Run cargo build - uses: actions-rs/cargo@v1 - with: - command: build - args: --manifest-path rust/cubestore/Cargo.toml -j 4 - - name: Run cargo test - uses: actions-rs/cargo@v1 - with: - command: test - args: --manifest-path rust/cubestore/Cargo.toml -j 1 - - docker-image-latest: - name: Build only :latest image - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - include: - - os: ubuntu-20.04 - target: x86_64-unknown-linux-gnu - platforms: linux/amd64 - build-args: WITH_AVX2=1 - - os: self-hosted - target: aarch64-unknown-linux-gnu - platforms: linux/arm64 - build-args: WITH_AVX2=0 - timeout-minutes: 60 - if: github.ref != 'refs/heads/master' - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - with: - version: v0.9.1 - - name: Cache Docker layers - uses: actions/cache@v3 - with: - path: /tmp/.buildx-cache - key: ${{ runner.os }}-${{ matrix.target }}-buildx-${{ github.sha }} - restore-keys: | - ${{ runner.os }}-${{ matrix.target }}-buildx- - - name: Build only - uses: docker/build-push-action@v3 - with: - context: ./rust/cubestore/ - file: ./rust/cubestore/Dockerfile - platforms: ${{ matrix.platforms }} - build-args: ${{ matrix.build-args }} - push: false - cache-from: type=local,src=/tmp/.buildx-cache - cache-to: type=local,dest=/tmp/.buildx-cache - - cross: - runs-on: ${{ matrix.os }} - timeout-minutes: 90 - env: - RUSTFLAGS: '-Ctarget-feature=+crt-static' - OPENSSL_STATIC: 1 - strategy: - matrix: - target: - - x86_64-unknown-linux-gnu - - x86_64-unknown-linux-musl - - x86_64-pc-windows-msvc - - x86_64-apple-darwin - - aarch64-unknown-linux-gnu - include: - - os: ubuntu-20.04 - target: x86_64-unknown-linux-gnu - executable_name: cubestored - cross: true - strip: true - compress: false - - os: ubuntu-20.04 - target: x86_64-unknown-linux-musl - executable_name: cubestored - cross: true - strip: true - # cubestored: CantPackException: bad DT_HASH nbucket=0x344 len=0x1890 - compress: false - - os: windows-2019 - target: x86_64-pc-windows-msvc - executable_name: cubestored.exe - cross: false - strip: true - # cubestored.exe: CantPackException: superfluous data between sections - compress: false - # Please use minimal possible version of macOS, 
because it produces constraint on libstdc++ - - os: macos-11 - target: x86_64-apple-darwin - executable_name: cubestored - cross: false - strip: true - compress: true - - os: ubuntu-20.04 - target: aarch64-unknown-linux-gnu - executable_name: cubestored - cross: true - # Unable to recognise the format of the input file `rust/cubestore/target/aarch64-unknown-linux-gnu/release/cubestored' - strip: false - # UPX is broken, issue https://github.com/cube-js/cube/issues/4474 - compress: false - fail-fast: false - steps: - - uses: actions/checkout@v4 - - name: Disable rustup update (issue workaround for Windows) - run: rustup set auto-self-update disable - if: contains(runner.os, 'windows') - shell: bash - - name: Setup Rust toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: nightly-2022-06-22 - target: ${{ matrix.target }} - override: true - components: rustfmt - - uses: Swatinem/rust-cache@v2 - with: - workspaces: ./rust/cubestore -> target - prefix-key: v0-rust-cubestore-cross - key: target-${{ matrix.target }} - - run: source .github/actions/${{ matrix.before_script }}.sh - if: ${{ matrix.before_script }} - shell: bash - - uses: ilammy/msvc-dev-cmd@v1 - if: ${{ startsWith(matrix.os, 'windows') }} - - name: Install OpenSSL for Windows - if: ${{ startsWith(matrix.os, 'windows') }} - run: vcpkg integrate install; vcpkg install openssl:x64-windows - - name: Instal LLVM for Windows - if: ${{ startsWith(matrix.os, 'windows') }} - run: choco install -y --force llvm --version 9.0.1 - - name: Set Env Variables for Windows - uses: allenevans/set-env@v3.0.0 - if: ${{ startsWith(matrix.os, 'windows') }} - with: - OPENSSL_DIR: 'C:/vcpkg/packages/openssl_x64-windows' - # This paths are required to work with static linking - OPENSSL_LIB_DIR: 'C:/vcpkg/packages/openssl_x64-windows/lib' - OPENSSL_INCLUDE_DIR: 'C:/vcpkg/packages/openssl_x64-windows/include' - LIBCLANG_PATH: 'C:\Program Files\LLVM\bin' - # Hotfix before https://github.com/actions/runner-images/pull/7125 will be released/rolled on the productions servers - - name: Hotfix for macOS (pkg-config) - if: contains(runner.os, 'macos') - run: brew install pkg-config - - name: Build with Cross - if: ${{ matrix.cross }} - run: | - wget -c https://github.com/rust-embedded/cross/releases/download/v0.2.1/cross-v0.2.1-x86_64-unknown-linux-gnu.tar.gz -O - | tar -xz - chmod +x cross && sudo mv cross /usr/local/bin/cross - cd rust/cubestore - cross build --release --target=${{ matrix.target }} - - name: Build with Cargo - if: ${{ !matrix.cross }} - run: | - cd rust/cubestore && cargo build --release --target=${{ matrix.target }} - - name: Compress binaries - uses: svenstaro/upx-action@v2 - if: ${{ matrix.compress }} - with: - file: rust/cubestore/target/${{ matrix.target }}/release/${{ matrix.executable_name }} - args: --lzma - strip: ${{ matrix.strip }} - - name: Create folder for archive - run: | - mkdir cubestore-archive - mkdir cubestore-archive/bin - - name: Copy/paste OpenSSL to Archive (hotfix for Windows) - if: ${{ startsWith(matrix.os, 'windows') }} - run: cp C:/vcpkg/packages/openssl_x64-windows/bin/*.dll cubestore-archive/bin - - name: Create archive for release - run: | - mv rust/cubestore/target/${{ matrix.target }}/release/${{ matrix.executable_name }} cubestore-archive/bin/${{ matrix.executable_name }} - cd cubestore-archive - tar -cvzf cubestored-${{ matrix.target }}.tar.gz * - - uses: actions/upload-artifact@v2 - with: - path: cubestore-archive/cubestored-${{ matrix.target }}.tar.gz - name: cubestored-${{ matrix.target }}.tar.gz - 
retention-days: 1 diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000000..ff094a9de7f88 --- /dev/null +++ b/Makefile @@ -0,0 +1,21 @@ +CUBE_VERSION=$(shell node -e "console.log(require('./packages/cubejs-docker/package.json').version);") +GIT_REV := $(shell git rev-parse --short HEAD) +DIRTY_FLAG := $(shell git diff HEAD --quiet || echo '-dirty') +IMAGE_VERSION=${CUBE_VERSION}-${GIT_REV}${DIRTY_FLAG} + +IMAGE=889818756387.dkr.ecr.us-east-1.amazonaws.com/incognia/cube:${IMAGE_VERSION} +CUBESTORE_IMAGE=889818756387.dkr.ecr.us-east-1.amazonaws.com/incognia/cubestore:${IMAGE_VERSION} + +.PHONY: build push cubestore/build cubestore/push + +build: cubestore/build + docker build -t ${IMAGE} . -f incognia.Dockerfile --build-arg IMAGE_VERSION=${IMAGE_VERSION} + +cubestore/build: + docker build -t ${CUBESTORE_IMAGE} rust/cubestore/ + +cubestore/push: + docker push ${CUBESTORE_IMAGE} + +push: build cubestore/push + docker push ${IMAGE} diff --git a/README.md b/README.md index 66cccc49fdf35..a2dbdb445b904 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,8 @@ +## Incognia Fork Note +This project is a fork of Cubejs's Cube, adding some patches that are important for the Incognia use case. + +Before starting the Cube update process, please take a moment to review [this document](https://www.notion.so/inlocoglobal/Cube-API-and-Cubestore-base-update-process-and-obstacles-92589ff0ca2643ad9b3a664162e1ecbd?pvs=4). It outlines the steps required for the update and highlights some obstacles that have been encountered during previous updates. +
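The Makefile added in this patch composes the Docker image tag from the cubejs-docker package version, the short git revision, and a `-dirty` suffix when the working tree has uncommitted changes. A minimal sketch of how the tag resolves — the version and revision below are hypothetical placeholders, not values taken from this patch:

```sh
# Hypothetical inputs; the real values come from package.json and git.
CUBE_VERSION=0.34.60   # node -e "console.log(require('./packages/cubejs-docker/package.json').version);"
GIT_REV=abc1234        # git rev-parse --short HEAD
DIRTY_FLAG=-dirty      # empty when `git diff HEAD --quiet` exits 0

IMAGE_VERSION="${CUBE_VERSION}-${GIT_REV}${DIRTY_FLAG}"   # => 0.34.60-abc1234-dirty

# `make push` builds both images (cube via incognia.Dockerfile, cubestore via
# rust/cubestore/Dockerfile) and pushes them to ECR under this tag.
make push
```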

Cube — Semantic Layer for Data Applications

diff --git a/examples/mapbox/dashboard-app/yarn.lock b/examples/mapbox/dashboard-app/yarn.lock index 831ec73b63a7a..34f00e8dc2dda 100644 --- a/examples/mapbox/dashboard-app/yarn.lock +++ b/examples/mapbox/dashboard-app/yarn.lock @@ -4730,9 +4730,9 @@ ee-first@1.1.1: integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== ejs@^3.1.6: - version "3.1.8" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.8.tgz#758d32910c78047585c7ef1f92f9ee041c1c190b" - integrity sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ== + version "3.1.10" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.10.tgz#69ab8358b14e896f80cc39e62087b88500c3ac3b" + integrity sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA== dependencies: jake "^10.8.5" diff --git a/incognia.Dockerfile b/incognia.Dockerfile new file mode 100644 index 0000000000000..a6c675e9e34b6 --- /dev/null +++ b/incognia.Dockerfile @@ -0,0 +1,75 @@ +FROM node:16.20.1-bullseye-slim AS base + +WORKDIR /cube + +COPY package.json . +COPY lerna.json . +COPY yarn.lock . +COPY tsconfig.base.json . + +ENV CUBESTORE_SKIP_POST_INSTALL=true + +COPY packages/cubejs-backend-shared/package.json packages/cubejs-backend-shared/package.json +COPY packages/cubejs-base-driver/package.json packages/cubejs-base-driver/package.json +COPY packages/cubejs-backend-native/package.json packages/cubejs-backend-native/package.json +COPY packages/cubejs-api-gateway/package.json packages/cubejs-api-gateway/package.json +COPY packages/cubejs-cli/package.json packages/cubejs-cli/package.json +COPY packages/cubejs-cubestore-driver/package.json packages/cubejs-cubestore-driver/package.json +COPY packages/cubejs-prestodb-driver/package.json packages/cubejs-prestodb-driver/package.json +COPY packages/cubejs-trino-driver/package.json packages/cubejs-trino-driver/package.json +COPY packages/cubejs-query-orchestrator/package.json packages/cubejs-query-orchestrator/package.json +COPY packages/cubejs-schema-compiler/package.json packages/cubejs-schema-compiler/package.json +COPY packages/cubejs-server/package.json packages/cubejs-server/package.json +COPY packages/cubejs-server-core/package.json packages/cubejs-server-core/package.json + +FROM base AS prod_deps + +# Use yarn v2+ because of https://github.com/yarnpkg/yarn/issues/6323 +# The yarn versions prior to 1.22.20 have a bug that it always download +# the latest release, so first we manually go to the 4.1.1 before going +# to the target version. 
+RUN YARN_IGNORE_NODE=1 yarn set version 4.1.1 +RUN YARN_IGNORE_NODE=1 yarn set version 3.8.1 +RUN yarn plugin import workspace-tools +RUN yarn config set nodeLinker node-modules +RUN yarn workspaces focus --production --all + +FROM base AS builder + +COPY packages/cubejs-backend-shared/ packages/cubejs-backend-shared/ +COPY packages/cubejs-base-driver/ packages/cubejs-base-driver/ +COPY packages/cubejs-backend-native/ packages/cubejs-backend-native/ +COPY packages/cubejs-api-gateway/ packages/cubejs-api-gateway/ +COPY packages/cubejs-cli/ packages/cubejs-cli/ +COPY packages/cubejs-cubestore-driver/ packages/cubejs-cubestore-driver/ +COPY packages/cubejs-prestodb-driver/ packages/cubejs-prestodb-driver/ +COPY packages/cubejs-trino-driver/ packages/cubejs-trino-driver/ +COPY packages/cubejs-query-orchestrator/ packages/cubejs-query-orchestrator/ +COPY packages/cubejs-schema-compiler/ packages/cubejs-schema-compiler/ +COPY packages/cubejs-server/ packages/cubejs-server/ +COPY packages/cubejs-server-core/ packages/cubejs-server-core/ + +RUN yarn install +RUN yarn lerna run build + +RUN find . -name 'node_modules' -type d -prune -exec rm -rf '{}' + + +FROM base AS final +ARG IMAGE_VERSION=unknown + +COPY --from=builder /cube . +COPY --from=prod_deps /cube . + +ENV CUBEJS_DOCKER_IMAGE_TAG=latest +ENV CUBEJS_DOCKER_IMAGE_VERSION=$IMAGE_VERSION + +ENV NODE_ENV production +ENV NODE_PATH /cube/conf/node_modules:/cube/node_modules +# I'm not sure why yarn is not automatically creating this bin file, but we just do it manually here +RUN chmod +x /cube/packages/cubejs-cli/dist/src/index.js && ln -s /cube/packages/cubejs-cli/dist/src/index.js /usr/local/bin/cubejs + +WORKDIR /cube/conf + +EXPOSE 4000 + +CMD ["cubejs", "server"] diff --git a/packages/cubejs-backend-shared/src/env.ts b/packages/cubejs-backend-shared/src/env.ts index 83dd2609e53e2..646be515dfa8b 100644 --- a/packages/cubejs-backend-shared/src/env.ts +++ b/packages/cubejs-backend-shared/src/env.ts @@ -1538,6 +1538,56 @@ const variables: Record any> = { ] ), + prestoAwsRegion: ({ + dataSource, + }: { + dataSource: string, + }) => ( + process.env[ + keyByDataSource('CUBEJS_DB_PRESTO_AWS_REGION', dataSource) + ] + ), + + prestoUnloadBucket: ({ + dataSource, + }: { + dataSource: string, + }) => ( + process.env[ + keyByDataSource('CUBEJS_DB_PRESTO_UNLOAD_BUCKET', dataSource) + ] + ), + + prestoUnloadPrefix: ({ + dataSource, + }: { + dataSource: string, + }) => ( + process.env[ + keyByDataSource('CUBEJS_DB_PRESTO_UNLOAD_PREFIX', dataSource) + ] + ), + + prestoUnloadCatalog: ({ + dataSource, + }: { + dataSource: string, + }) => ( + process.env[ + keyByDataSource('CUBEJS_DB_PRESTO_UNLOAD_CATALOG', dataSource) + ] + ), + + prestoUnloadSchema: ({ + dataSource, + }: { + dataSource: string, + }) => ( + process.env[ + keyByDataSource('CUBEJS_DB_PRESTO_UNLOAD_SCHEMA', dataSource) + ] + ), + /** **************************************************************** * Cube Store Driver * ***************************************************************** */ diff --git a/packages/cubejs-cli/docker-compose.yml b/packages/cubejs-cli/docker-compose.yml new file mode 100644 index 0000000000000..21cbeba119975 --- /dev/null +++ b/packages/cubejs-cli/docker-compose.yml @@ -0,0 +1,20 @@ +version: '3.1' +services: + db: + image: postgres:16 + container_name: "postgres" + restart: always + environment: + - POSTGRES_PASSWORD=postgres + - POSTGRES_USER=postgres + - POSTGRES_HOST=postgres + - POSTGRES_DB=postgres + volumes: + - ./mock/docker/dataset:/docker-entrypoint-initdb.d 
+ ports: + - 5454:5432 + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 5s + retries: 5 diff --git a/packages/cubejs-cli/mock/docker/dataset/events.csv b/packages/cubejs-cli/mock/docker/dataset/events.csv new file mode 100644 index 0000000000000..2c46bab428917 --- /dev/null +++ b/packages/cubejs-cli/mock/docker/dataset/events.csv @@ -0,0 +1,4 @@ +dt,id +2024-03-01,1 +2024-03-01,2 +2024-03-02,3 diff --git a/packages/cubejs-cli/mock/docker/dataset/init.sql b/packages/cubejs-cli/mock/docker/dataset/init.sql new file mode 100644 index 0000000000000..8ef16f65435cd --- /dev/null +++ b/packages/cubejs-cli/mock/docker/dataset/init.sql @@ -0,0 +1,9 @@ +CREATE TABLE events ( + dt TEXT, + id INT +); + +COPY events +FROM '/docker-entrypoint-initdb.d/events.csv' +DELIMITER ',' +CSV HEADER; \ No newline at end of file diff --git a/packages/cubejs-cli/mock/model/cubes/bar.js b/packages/cubejs-cli/mock/model/cubes/bar.js new file mode 100644 index 0000000000000..cf3f77bd7ce00 --- /dev/null +++ b/packages/cubejs-cli/mock/model/cubes/bar.js @@ -0,0 +1,30 @@ +cube('Bar', { + data_source: `postgres`, + sql_table: `events`, + + dimensions: { + dt: { + sql: `dt`, + type: `string`, + primary_key: true + }, + + id: { + sql: `id`, + type: `number`, + primary_key: true + }, + }, + + pre_aggregations: { + main: { + dimensions: [ + CUBE.dt, + CUBE.id, + ], + refreshKey: { + every: `1 second`, + } + } + } +}); \ No newline at end of file diff --git a/packages/cubejs-cli/src/cli.ts b/packages/cubejs-cli/src/cli.ts index 33ad674b95d07..07dc7bf0cea58 100644 --- a/packages/cubejs-cli/src/cli.ts +++ b/packages/cubejs-cli/src/cli.ts @@ -9,6 +9,7 @@ import { configureTypegenCommand } from './command/typegen'; import { configureAuthCommand } from './command/auth'; import { loadCliManifest } from './utils'; import { configureValidateCommand } from './command/validate'; +import { configureAggregationCommand } from './command/aggregation-warmup'; const packageJson = loadCliManifest(); @@ -32,6 +33,7 @@ program await configureDeployCommand(program); await configureServerCommand(program); await configureValidateCommand(program); + await configureAggregationCommand(program); if (!process.argv.slice(2).length) { program.help(); diff --git a/packages/cubejs-cli/src/command/aggregation-warmup.ts b/packages/cubejs-cli/src/command/aggregation-warmup.ts new file mode 100644 index 0000000000000..e10b6f673bdb8 --- /dev/null +++ b/packages/cubejs-cli/src/command/aggregation-warmup.ts @@ -0,0 +1,62 @@ +import { CommanderStatic } from 'commander'; +import { isDockerImage, requireFromPackage, packageExists, getEnv } from '@cubejs-backend/shared'; +import { displayError } from '../utils'; +import type { ServerContainer as ServerContainerType } from '@cubejs-backend/server'; + +async function aggregationWarmup(options) : Promise { + const relative = isDockerImage(); + + if (!packageExists('@cubejs-backend/server', relative)) { + await displayError( + '@cubejs-backend/server dependency not found. Please run generate command from project directory.'); + } + + const serverPackage = requireFromPackage<{ ServerContainer: any }>( + '@cubejs-backend/server', + { + relative, + } + ); + + if (!serverPackage.ServerContainer) { + await displayError( '@cubejs-backend/server is too old. 
Please use @cubejs-backend/server >= v0.26.11') + } + + const container: ServerContainerType = new serverPackage.ServerContainer({ debug: false }); + const configuration = await container.lookupConfiguration(); + const server = await container.runServerInstance( + configuration, + true, + Object.keys(configuration).length === 0 + ); + + let queryIteratorState = {} + let exit = 1; + + for (; ;) { + try { + const { finished } = await server.runScheduledRefresh({}, { + concurrency: configuration.scheduledRefreshConcurrency, queryIteratorState, preAggregationsWarmup: true, throwErrors: true, + }); + + if (finished) { + exit = 0; + break; + } + } catch (e: any) { + if (e.error != "Continue wait") { + displayError(`Something went wrong refreshing aggregations ${JSON.stringify(e)}`) + break; + } + } + } + + await server.shutdown("", true) + console.log("Server shutdown done") + process.exit(exit) +} + +export function configureAggregationCommand(program: CommanderStatic) { + program.command("aggregation-warmup").action((options) => aggregationWarmup(options) + .catch(e => displayError(e.stack || e))) +} diff --git a/packages/cubejs-cli/test/cli.test.ts b/packages/cubejs-cli/test/cli.test.ts new file mode 100644 index 0000000000000..644e76cb7fd8b --- /dev/null +++ b/packages/cubejs-cli/test/cli.test.ts @@ -0,0 +1,60 @@ +const path = require('path'); +const { exec, ChildProcess } = require('child_process'); +const { DockerComposeEnvironment, Wait } = require('testcontainers'); + +describe('cli', () => { + jest.setTimeout(6 * 60 * 1000); + + let env: any; + + // eslint-disable-next-line consistent-return,func-names + beforeAll(async () => { + const dc = new DockerComposeEnvironment( + path.resolve(path.dirname(__filename), '../../'), + 'docker-compose.yml' + ); + + env = await dc + .withStartupTimeout(240 * 1000) + .withWaitStrategy('postgres', Wait.forHealthCheck()) + .up(); + }); + + // eslint-disable-next-line consistent-return,func-names + afterAll(async () => { + if (env) { + await env.down(); + } + }); + + it('aggregation-warmup command', async () => { + let result = await cli( + [ + 'CUBEJS_DEV_MODE=true', + 'CUBEJS_SCHEMA_PATH=mock/model', + 'CUBEJS_DB_TYPE=postgres', + 'CUBEJS_DB_HOST=localhost', + 'CUBEJS_DB_PORT=5454', + 'CUBEJS_DB_NAME=postgres', + 'CUBEJS_DB_USER=postgres', + 'CUBEJS_DB_PASS=postgres' + ], + ['aggregation-warmup'], + '.' + ); + console.log(result.stdout); + expect(result.code).toBe(0); + }); + + function cli(exports, args, cwd): Promise { + return new Promise(resolve => { + exec(`${exports.join(' ')} node ${path.resolve('./dist/src/cli')} ${args.join(' ')}`, + { cwd }, + (error, stdout, stderr) => { resolve({ + code: error && error.code ? 
error.code : 0, + error, + stdout, + stderr }) + }) + })} +}); diff --git a/packages/cubejs-prestodb-driver/package.json b/packages/cubejs-prestodb-driver/package.json index 3f6bf6ce10149..72c09ef6875d9 100644 --- a/packages/cubejs-prestodb-driver/package.json +++ b/packages/cubejs-prestodb-driver/package.json @@ -29,6 +29,8 @@ "dependencies": { "@cubejs-backend/base-driver": "^0.34.60", "@cubejs-backend/shared": "^0.34.60", + "@aws-sdk/client-s3": "^3.49.0", + "@aws-sdk/s3-request-presigner": "^3.49.0", "presto-client": "^0.12.2", "ramda": "^0.27.0", "sqlstring": "^2.3.1" diff --git a/packages/cubejs-prestodb-driver/src/PrestoDriver.ts b/packages/cubejs-prestodb-driver/src/PrestoDriver.ts index 1989bec444bfb..94a2b2534fc9b 100644 --- a/packages/cubejs-prestodb-driver/src/PrestoDriver.ts +++ b/packages/cubejs-prestodb-driver/src/PrestoDriver.ts @@ -10,7 +10,9 @@ import { StreamOptions, StreamTableData, TableStructure, - BaseDriver + BaseDriver, + UnloadOptions, + DownloadTableCSVData, } from '@cubejs-backend/base-driver'; import { getEnv, @@ -24,6 +26,8 @@ import { map, zipObj, prop, concat } from 'ramda'; import SqlString from 'sqlstring'; +import { S3, GetObjectCommand } from '@aws-sdk/client-s3'; +import { getSignedUrl } from '@aws-sdk/s3-request-presigner'; const presto = require('presto-client'); @@ -38,6 +42,12 @@ export type PrestoDriverConfiguration = { ssl?: string | TLSConnectionOptions; dataSource?: string; queryTimeout?: number; + unloadCatalog?: string; + unloadSchema?: string; + unloadBucket?: string; + unloadPrefix?: string; + region?: string; + exportBucketCsvEscapeSymbol?: string }; /** @@ -84,6 +94,11 @@ export class PrestoDriver extends BaseDriver implements DriverInterface { } : undefined, ssl: this.getSslOptions(dataSource), + region: config.region || getEnv('prestoAwsRegion', { dataSource }), + unloadBucket: config.unloadBucket || getEnv('prestoUnloadBucket', { dataSource }), + unloadPrefix: config.unloadPrefix || getEnv('prestoUnloadPrefix', { dataSource }), + unloadCatalog: config.unloadCatalog || getEnv('prestoUnloadCatalog', { dataSource }), + unloadSchema: config.unloadSchema || getEnv('prestoUnloadSchema', { dataSource }), ...config }; this.catalog = this.config.catalog; @@ -101,6 +116,100 @@ export class PrestoDriver extends BaseDriver implements DriverInterface { }); } + public async isUnloadSupported() { + return this.config.unloadBucket !== undefined + && this.config.unloadPrefix !== undefined + && this.config.unloadCatalog !== undefined + && this.config.unloadSchema !== undefined; + } + + public async unload(tableName: string, options: UnloadOptions): Promise { + /* + "tableName" is a bit misleading since it also includes schema name. 
Ex: dev_pre_aggregations.your_table_name, + if using this name directly on trino, remember to quote it like its done with CREATE TABLE AS query for unloading + */ + const columns = await this.unloadWithSql(tableName, options) + const files = await this.getCsvFiles(tableName) + + return { + csvFile: files, + types: columns, + csvNoHeader: true, + csvDelimiter: '^A' + } + } + + private async unloadWithSql( + tableName: string, + unloadOptions: UnloadOptions,): Promise { + const unloadSchema = this.config.unloadSchema!; + const unloadCatalog = this.config.unloadCatalog!; + const trinoTable = `${unloadCatalog}.${unloadSchema}."${tableName}"` + + const dropIfExistsSql = /* sql */` + DROP TABLE IF EXISTS ${trinoTable} + ` + await this.query(dropIfExistsSql, []) + + const unloadSql = /* sql */` + CREATE TABLE ${unloadCatalog}.${unloadSchema}."${tableName}" + WITH (FORMAT='TEXTFILE') AS ${unloadOptions.query!.sql} + ` + await this.query(unloadSql, unloadOptions.query!.params) + const columns = await this.tableColumns(unloadCatalog, unloadSchema, tableName) + + return columns; + } + + /* + This is based on on super.tableColumnTypes. The problem with the original method + was that it assumed that tableName did not contain dots and it extracted the schema + from there. Also it didn't consider trino's catalog. + */ + private async tableColumns(catalog: string, schema: string, table: string): Promise { + const columns = await this.query( + `SELECT columns.column_name as ${this.quoteIdentifier('column_name')}, + columns.table_name as ${this.quoteIdentifier('table_name')}, + columns.table_schema as ${this.quoteIdentifier('table_schema')}, + columns.data_type as ${this.quoteIdentifier('data_type')} + FROM information_schema.columns + WHERE table_catalog = ${this.param(0)} AND table_schema = ${this.param(1)} AND table_name = ${this.param(2)}`, + [catalog, schema, table] + ); + + return columns.map(c => ({ name: c.column_name, type: this.toGenericType(c.data_type) })); + } + + /** + * Returns an array of signed URLs of the unloaded csv files. 
+ * + * Copied from athena driver + */ + public async getCsvFiles(tableName: string): Promise { + const client = new S3({ + region: this.config.region!, + }); + const list = await client.listObjectsV2({ + Bucket: this.config.unloadBucket!, + Prefix: `${this.config.unloadPrefix}/${tableName}`, + }); + if (!list.Contents) { + return []; + } else { + const files = await Promise.all( + list.Contents.map(async (file) => { + const command = new GetObjectCommand({ + Bucket: this.config.unloadBucket, + Key: file.Key, + }); + return getSignedUrl(client, command, { expiresIn: 3600 }); + }) + ); + + return files; + } + } + public query(query: string, values: unknown[]): Promise { return > this.queryPromised(this.prepareQueryWithParams(query, values), false); } diff --git a/rust/cubestore/Cargo.lock b/rust/cubestore/Cargo.lock index e7c81ead4471e..8542d58e88b13 100644 --- a/rust/cubestore/Cargo.lock +++ b/rust/cubestore/Cargo.lock @@ -1342,7 +1342,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" [[package]] name = "datafusion" version = "4.0.0-SNAPSHOT" -source = "git+https://github.com/cube-js/arrow-datafusion?branch=cube#4998c7b1ca9d66efc6369fda2bf9ca409b8e1d94" +source = "git+https://github.com/igorcalabria/arrow-datafusion.git?branch=fix-group-by-null-columns#c02d9715b6c77aaf7d752edee6f3b382bc8bfa2b" dependencies = [ "ahash 0.7.4", "arrow", diff --git a/rust/cubestore/cubestore/Cargo.toml b/rust/cubestore/cubestore/Cargo.toml index ecf75e81c1058..85dd26ed46c25 100644 --- a/rust/cubestore/cubestore/Cargo.toml +++ b/rust/cubestore/cubestore/Cargo.toml @@ -29,7 +29,7 @@ cuberpc = { path = "../cuberpc" } parquet = { git = "https://github.com/cube-js/arrow-rs", branch = "cube", features = ["arrow"] } arrow = { git = "https://github.com/cube-js/arrow-rs", branch = "cube" } arrow-flight = { git = "https://github.com/cube-js/arrow-rs", branch = "cube" } -datafusion = { git = "https://github.com/cube-js/arrow-datafusion", branch = "cube", features = ["default_nulls_last"] } +datafusion = { git = "https://github.com/igorcalabria/arrow-datafusion.git", branch = "fix-group-by-null-columns", features = ["default_nulls_last"] } csv = "1.1.3" bytes = "0.5.4" serde_json = "1.0.56" diff --git a/rust/cubestore/cubestore/src/import/mod.rs b/rust/cubestore/cubestore/src/import/mod.rs index d2f907c88428c..8558fa07a80d6 100644 --- a/rust/cubestore/cubestore/src/import/mod.rs +++ b/rust/cubestore/cubestore/src/import/mod.rs @@ -318,51 +318,16 @@ impl<'a> CsvLineParser<'a> { } fn next_value(&mut self) -> Result { - Ok( - if let Some(b'"') = self.remaining.as_bytes().iter().nth(0) { - let mut closing_index = None; - let mut seen_escapes = false; - self.remaining = &self.remaining[1..]; - let mut first_quote_index = None; - for (i, c) in self.remaining.char_indices() { - if c == '"' && first_quote_index.is_some() { - seen_escapes = true; - first_quote_index = None; - } else if c == '"' { - first_quote_index = Some(i); - } else if first_quote_index.is_some() { - closing_index = first_quote_index.take(); - break; - } - } - if first_quote_index.is_some() { - closing_index = first_quote_index.take(); - } - let closing_index = closing_index.ok_or(CubeError::user(format!( - "Malformed CSV string: {}", - self.line - )))?; - let res; - if seen_escapes { - let unescaped = self.remaining[0..closing_index].replace("\"\"", "\""); - res = MaybeOwnedStr::Owned(unescaped) - } else { - res = MaybeOwnedStr::Borrowed(&self.remaining[0..closing_index]) - } - self.remaining = self.remaining[(closing_index + 
1)..].as_ref(); - res - } else { - let next_comma = self - .remaining - .as_bytes() - .iter() - .position(|c| *c == self.delimiter) - .unwrap_or(self.remaining.len()); - let res = &self.remaining[0..next_comma]; - self.remaining = self.remaining[next_comma..].as_ref(); - MaybeOwnedStr::Borrowed(res) - }, - ) + let next_comma = self + .remaining + .as_bytes() + .iter() + .position(|c| *c == self.delimiter) + .unwrap_or(self.remaining.len()); + let res = &self.remaining[0..next_comma]; + self.remaining = self.remaining[next_comma..].as_ref(); + + return Ok(MaybeOwnedStr::Borrowed(res)); } fn advance(&mut self) -> Result<(), CubeError> { @@ -426,9 +391,7 @@ impl Stream for CsvLineStream { } else { let new_line_pos = memchr::memchr(b'\n', available); let quote_pos = memchr::memchr(b'"', available); - let in_quotes = quote_pos.is_some() - && (new_line_pos.is_some() && quote_pos < new_line_pos - || new_line_pos.is_none()); + let in_quotes = false; if in_quotes { if let Some(i) = quote_pos { projected.buf.extend_from_slice(&available[..=i]);
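The Presto unload path and the new `aggregation-warmup` command introduced in this patch are both driven by environment variables. A hedged usage sketch follows — every value below is a placeholder; only the variable names, the `cubejs aggregation-warmup` invocation, and the exit-code behaviour come from the patch itself:

```sh
# Placeholder values; only the variable names come from the env.ts additions above.
export CUBEJS_DB_PRESTO_AWS_REGION=us-east-1
export CUBEJS_DB_PRESTO_UNLOAD_BUCKET=example-unload-bucket
export CUBEJS_DB_PRESTO_UNLOAD_PREFIX=cube/unload
export CUBEJS_DB_PRESTO_UNLOAD_CATALOG=hive
export CUBEJS_DB_PRESTO_UNLOAD_SCHEMA=cube_unload

# Unload is only reported as supported when bucket, prefix, catalog and schema
# are all set (see isUnloadSupported in PrestoDriver.ts).

# Build pre-aggregations once and exit: code 0 when the refresh finished, 1 otherwise.
cubejs aggregation-warmup
```

The new test in `packages/cubejs-cli/test/cli.test.ts` exercises the same command against the Postgres service from the added `docker-compose.yml`, passing the `CUBEJS_DB_*` connection variables inline.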