diff --git a/.github/actions/build-greptime-binary/action.yml b/.github/actions/build-greptime-binary/action.yml index 268b76275a8b..1baf6d6d99ba 100644 --- a/.github/actions/build-greptime-binary/action.yml +++ b/.github/actions/build-greptime-binary/action.yml @@ -9,7 +9,7 @@ inputs: required: true cargo-profile: description: Cargo profile to build - required: true + required: false artifacts-dir: description: Directory to store artifacts required: true @@ -37,14 +37,21 @@ runs: FEATURES=${{ inputs.features }} \ BASE_IMAGE=${{ inputs.base-image }} + - name: Pack GreptimeDB binaries + shell: bash + env: + # If inputs.cargo-profile is empty, we will use 'debug' as default. + PROFILE_TARGET: ${{ inputs.cargo-profile == '' && 'debug' || inputs.cargo-profile }} + run: | + mkdir -p ${{ inputs.artifacts-dir }} && \ + sudo mv ./target/$PROFILE_TARGET/greptime ${{ inputs.artifacts-dir }} && \ + sudo mv ./target/$PROFILE_TARGET/sqlness-runner ${{ inputs.artifacts-dir }} + - name: Upload artifacts uses: ./.github/actions/upload-artifacts if: ${{ inputs.build-android-artifacts == 'false' }} - env: - PROFILE_TARGET: ${{ inputs.cargo-profile == 'dev' && 'debug' || inputs.cargo-profile }} with: artifacts-dir: ${{ inputs.artifacts-dir }} - target-file: ./target/$PROFILE_TARGET/greptime version: ${{ inputs.version }} working-dir: ${{ inputs.working-dir }} diff --git a/.github/actions/upload-artifacts/action.yml b/.github/actions/upload-artifacts/action.yml index 4322fe7c0db2..97468582a701 100644 --- a/.github/actions/upload-artifacts/action.yml +++ b/.github/actions/upload-artifacts/action.yml @@ -4,9 +4,6 @@ inputs: artifacts-dir: description: Directory to store artifacts required: true - target-file: - description: The path of the target artifact - required: true version: description: Version of the artifact required: true @@ -17,13 +14,6 @@ inputs: runs: using: composite steps: - - name: Create artifacts directory - working-directory: ${{ inputs.working-dir }} - shell: bash - 
run: | - mkdir -p ${{ inputs.artifacts-dir }} && \ - cp ${{ inputs.target-file }} ${{ inputs.artifacts-dir }} - # The compressed artifacts will use the following layout: # greptime-linux-amd64-pyo3-v0.3.0sha256sum # greptime-linux-amd64-pyo3-v0.3.0.tar.gz @@ -51,13 +41,13 @@ runs: # Note: The artifacts will be double zip compressed(related issue: https://github.com/actions/upload-artifact/issues/39). # However, when we use 'actions/download-artifact@v3' to download the artifacts, it will be automatically unzipped. - name: Upload artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ inputs.artifacts-dir }} path: ${{ inputs.working-dir }}/${{ inputs.artifacts-dir }}.tar.gz - name: Upload checksum - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: ${{ inputs.artifacts-dir }}.sha256sum path: ${{ inputs.working-dir }}/${{ inputs.artifacts-dir }}.sha256sum diff --git a/.github/workflows/develop.yml b/.github/workflows/develop.yml index f58e9546a903..5a54ab35394e 100644 --- a/.github/workflows/develop.yml +++ b/.github/workflows/develop.yml @@ -76,9 +76,29 @@ jobs: - name: Run taplo run: taplo format --check + build: + name: Build GreptimeDB binaries + if: github.event.pull_request.draft == false + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ ubuntu-20.04-8-cores ] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - uses: ./.github/actions/build-greptime-binary + with: + base-image: ubuntu + features: '' + artifacts-dir: bins + version: current + - name: Print bins info + run: ls -lh ./bins + sqlness: name: Sqlness Test if: github.event.pull_request.draft == false + needs: build runs-on: ${{ matrix.os }} strategy: matrix: @@ -86,16 +106,18 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v3 - - uses: arduino/setup-protoc@v1 - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - - uses: dtolnay/rust-toolchain@master - with: - toolchain: ${{ env.RUST_TOOLCHAIN }} - - 
name: Rust Cache - uses: Swatinem/rust-cache@v2 + - name: Download pre-built binaries + uses: actions/download-artifact@v4 + with: + name: bins + path: ./bins + - name: Untar binaries + run: | + tar -xvf ./bins/bins.tar.gz -C ./bins && \ + pwd && \ + ls -lh ./bins - name: Run sqlness - run: cargo sqlness + run: GREPTIME_BINS_DIR=./bins ./bins/sqlness-runner -c ./tests/cases - name: Upload sqlness logs if: always() uses: actions/upload-artifact@v3 @@ -107,6 +129,7 @@ jobs: sqlness-kafka-wal: name: Sqlness Test with Kafka Wal if: github.event.pull_request.draft == false + needs: build runs-on: ${{ matrix.os }} strategy: matrix: @@ -114,19 +137,18 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v3 - - uses: arduino/setup-protoc@v1 + - name: Download pre-built binaries + uses: actions/download-artifact@v4 with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - - uses: dtolnay/rust-toolchain@master - with: - toolchain: ${{ env.RUST_TOOLCHAIN }} - - name: Rust Cache - uses: Swatinem/rust-cache@v2 + name: bins + path: ./bins + - name: Untar binaries + run: tar -xvf ./bins/bins.tar.gz -C ./bins - name: Setup kafka server working-directory: tests-integration/fixtures/kafka run: docker compose -f docker-compose-standalone.yml up -d --wait - name: Run sqlness - run: cargo sqlness -w kafka -k 127.0.0.1:9092 + run: GREPTIME_BINS_DIR=./bins ./bins/sqlness-runner -w kafka -k 127.0.0.1:9092 -c ./tests/cases - name: Upload sqlness logs if: always() uses: actions/upload-artifact@v3 diff --git a/docker/dev-builder/ubuntu/Dockerfile b/docker/dev-builder/ubuntu/Dockerfile index d78603234e43..1e0a902eea47 100644 --- a/docker/dev-builder/ubuntu/Dockerfile +++ b/docker/dev-builder/ubuntu/Dockerfile @@ -40,7 +40,7 @@ RUN apt-get -y purge python3.8 && \ # wildcard here. However, that requires the git's config files and the submodules all owned by the very same user. 
# It's troublesome to do this since the dev build runs in Docker, which is under user "root"; while outside the Docker, # it can be a different user that have prepared the submodules. -RUN git config --global --add safe.directory ‘*’ +RUN git config --global --add safe.directory "*" # Install Python dependencies. COPY $DOCKER_BUILD_ROOT/docker/python/requirements.txt /etc/greptime/requirements.txt diff --git a/tests/runner/src/env.rs b/tests/runner/src/env.rs index 8d1f189b7266..94aedfef2e01 100644 --- a/tests/runner/src/env.rs +++ b/tests/runner/src/env.rs @@ -57,6 +57,11 @@ pub struct Env { data_home: PathBuf, server_addr: Option<String>, wal: WalConfig, + + /// The path to the directory that contains the pre-built GreptimeDB binary. + /// When running in CI, this is expected to be set. + /// If not set, this runner will build the GreptimeDB binary itself when needed, and set this field by then. + bins_dir: Arc<Mutex<Option<PathBuf>>>, } #[async_trait] @@ -79,10 +84,12 @@ impl EnvController for Env { impl Env { pub fn new(data_home: PathBuf, server_addr: Option<String>, wal: WalConfig) -> Self { + let bins_dir = std::env::var("GREPTIME_BINS_DIR").map(PathBuf::from).ok(); Self { data_home, server_addr, wal, + bins_dir: Arc::new(Mutex::new(bins_dir)), } } @@ -90,7 +97,7 @@ impl Env { if let Some(server_addr) = self.server_addr.clone() { self.connect_db(&server_addr) } else { - Self::build_db().await; + self.build_db(); self.setup_wal(); let db_ctx = GreptimeDBContext::new(self.wal.clone()); @@ -116,7 +123,7 @@ impl Env { if let Some(server_addr) = self.server_addr.clone() { self.connect_db(&server_addr) } else { - Self::build_db().await; + self.build_db(); self.setup_wal(); let db_ctx = GreptimeDBContext::new(self.wal.clone()); @@ -249,8 +256,12 @@ impl Env { #[cfg(windows)] let program = "greptime.exe"; + let bins_dir = self.bins_dir.lock().unwrap().clone().expect( + "GreptimeDB binary is not available. 
Please set the GREPTIME_BINS_DIR environment variable to the directory that contains the pre-built GreptimeDB binary. Or you may call `self.build_db()` beforehand.", + ); + let mut process = Command::new(program) - .current_dir(util::get_binary_dir("debug")) + .current_dir(bins_dir) .env("TZ", "UTC") .args(args) .stdout(log_file) @@ -374,7 +385,11 @@ impl Env { } /// Build the DB with `cargo build --bin greptime` - async fn build_db() { + fn build_db(&self) { + if self.bins_dir.lock().unwrap().is_some() { + return; + } + println!("Going to build the DB..."); let output = Command::new("cargo") .current_dir(util::get_workspace_root()) @@ -389,7 +404,12 @@ impl Env { io::stderr().write_all(&output.stderr).unwrap(); panic!(); } - println!("Build finished, starting..."); + + let _ = self + .bins_dir + .lock() + .unwrap() + .insert(util::get_binary_dir("debug")); } } diff --git a/tests/runner/src/util.rs b/tests/runner/src/util.rs index 71ae585a81ad..5ae63ede37c7 100644 --- a/tests/runner/src/util.rs +++ b/tests/runner/src/util.rs @@ -91,7 +91,7 @@ pub fn get_workspace_root() -> String { runner_crate_path.into_os_string().into_string().unwrap() } -pub fn get_binary_dir(mode: &str) -> String { +pub fn get_binary_dir(mode: &str) -> PathBuf { // first go to the workspace root. let mut workspace_root = PathBuf::from(get_workspace_root()); @@ -99,7 +99,7 @@ pub fn get_binary_dir(mode: &str) -> String { workspace_root.push("target"); workspace_root.push(mode); - workspace_root.into_os_string().into_string().unwrap() + workspace_root } /// Spin-waiting a socket address is available, or timeout.