diff --git a/.github/services/hdfs/hdfs_default_azblob/action.yml b/.github/services/hdfs/hdfs_default_azblob/action.yml
new file mode 100644
index 000000000000..6a377f29d665
--- /dev/null
+++ b/.github/services/hdfs/hdfs_default_azblob/action.yml
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: hdfs_default_azblob
+description: 'Behavior test for hdfs default over azure blob storage'
+
+runs:
+  using: "composite"
+  steps:
+    - name: Setup java env
+      uses: actions/setup-java@v4
+      with:
+        distribution: temurin
+        java-version: "11"
+    - name: Load secrets
+      uses: 1password/load-secrets-action@v1
+      with:
+        export-env: true
+      env:
+        # Need an environment variable with this name to securely encrypt the keystore - otherwise there will be a warning
+        # See https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/CredentialProviderAPI.html#Keystore_Passwords
+        HADOOP_CREDSTORE_PASSWORD: op://services/hdfs/credstore_password
+        # TODO1: Do we have this as a secret?
+        # TODO2: Does this start with / and end with /?
+        OPENDAL_AZBLOB_ROOT: op://services/azblob/root
+        OPENDAL_AZBLOB_CONTAINER: op://services/azblob/container
+        OPENDAL_AZBLOB_ACCOUNT_NAME: op://services/azblob/account_name
+        OPENDAL_AZBLOB_ACCOUNT_KEY: op://services/azblob/account_key
+    - name: Setup
+      shell: bash
+      run: |
+        curl -LsSf https://dlcdn.apache.org/hadoop/common/hadoop-3.3.5/hadoop-3.3.5.tar.gz | tar zxf - -C /home/runner
+
+        export HADOOP_HOME="/home/runner/hadoop-3.3.5"
+
+        curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/hadoop-azure-3.3.5.jar https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure/3.3.5/hadoop-azure-3.3.5.jar
+        curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/azure-storage-7.0.1.jar https://repo1.maven.org/maven2/com/microsoft/azure/azure-storage/7.0.1/azure-storage-7.0.1.jar
+
+        export CLASSPATH=$(${HADOOP_HOME}/bin/hadoop classpath --glob)
+
+        mkdir -p /tmp/hdfs
+
+        pushd ${HADOOP_HOME}
+        ./bin/hadoop credential create "fs.azure.account.key.${OPENDAL_AZBLOB_ACCOUNT_NAME}.blob.core.windows.net" -value "${OPENDAL_AZBLOB_ACCOUNT_KEY}" -provider localjceks://file/tmp/hdfs/wasb.jceks
+        popd
+
+        cp ./fixtures/hdfs/hdfs-site.xml ${HADOOP_HOME}/etc/hadoop/hdfs-site.xml
+        cp ./fixtures/hdfs/azblob-core-site.xml ${HADOOP_HOME}/etc/hadoop/core-site.xml
+
+        cat << EOF >> "$GITHUB_ENV"
+        HADOOP_HOME=${HADOOP_HOME}
+        CLASSPATH=${CLASSPATH}
+        LD_LIBRARY_PATH=${JAVA_HOME}/lib/server:${HADOOP_HOME}/lib/native
+        OPENDAL_HDFS_ROOT=${OPENDAL_AZBLOB_ROOT}
+        OPENDAL_HDFS_NAME_NODE=wasbs://${OPENDAL_AZBLOB_CONTAINER}
+        OPENDAL_HDFS_ENABLE_APPEND=false
+        EOF
diff --git a/.github/services/hdfs/hdfs_default_s3/action.yml b/.github/services/hdfs/hdfs_default_s3/action.yml
new file mode 100644
index 000000000000..3f4055652822
--- /dev/null
+++ b/.github/services/hdfs/hdfs_default_s3/action.yml
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: hdfs_default_s3
+description: 'Behavior test for hdfs default over aws s3'
+
+runs:
+  using: "composite"
+  steps:
+    - name: Setup java env
+      uses: actions/setup-java@v4
+      with:
+        distribution: temurin
+        java-version: "11"
+    - name: Load secrets
+      uses: 1password/load-secrets-action@v1
+      with:
+        export-env: true
+      env:
+        # Need an environment variable with this name to securely encrypt the keystore - otherwise there will be a warning
+        # See https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/CredentialProviderAPI.html#Keystore_Passwords
+        HADOOP_CREDSTORE_PASSWORD: op://services/hdfs/credstore_password
+        # TODO: Does this start with / and end with /?
+        OPENDAL_S3_ROOT: op://services/s3/root
+        OPENDAL_S3_BUCKET: op://services/s3/bucket
+        OPENDAL_S3_ACCESS_KEY_ID: op://services/s3/access_key_id
+        OPENDAL_S3_SECRET_ACCESS_KEY: op://services/s3/secret_access_key
+    - name: Setup
+      shell: bash
+      run: |
+        curl -LsSf https://dlcdn.apache.org/hadoop/common/hadoop-3.3.5/hadoop-3.3.5.tar.gz | tar zxf - -C /home/runner
+
+        export HADOOP_HOME="/home/runner/hadoop-3.3.5"
+
+        curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/hadoop-aws-3.3.5.jar https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.5/hadoop-aws-3.3.5.jar
+        curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/aws-java-sdk-bundle-1.12.653.jar https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.12.653/aws-java-sdk-bundle-1.12.653.jar
+
+        export CLASSPATH=$(${HADOOP_HOME}/bin/hadoop classpath --glob)
+
+        mkdir -p /tmp/hdfs
+
+        pushd ${HADOOP_HOME}
+        ./bin/hadoop credential create fs.s3a.access.key -value "${OPENDAL_S3_ACCESS_KEY_ID}" -provider localjceks://file/tmp/hdfs/s3.jceks
+        ./bin/hadoop credential create fs.s3a.secret.key -value "${OPENDAL_S3_SECRET_ACCESS_KEY}" -provider localjceks://file/tmp/hdfs/s3.jceks
+        popd
+
+        cp ./fixtures/hdfs/hdfs-site.xml ${HADOOP_HOME}/etc/hadoop/hdfs-site.xml
+        cp ./fixtures/hdfs/s3-core-site.xml ${HADOOP_HOME}/etc/hadoop/core-site.xml
+
+        cat << EOF >> "$GITHUB_ENV"
+        HADOOP_HOME=${HADOOP_HOME}
+        CLASSPATH=${CLASSPATH}
+        LD_LIBRARY_PATH=${JAVA_HOME}/lib/server:${HADOOP_HOME}/lib/native
+        OPENDAL_HDFS_ROOT=${OPENDAL_S3_ROOT}
+        OPENDAL_HDFS_NAME_NODE=s3a://${OPENDAL_S3_BUCKET}
+        OPENDAL_HDFS_ENABLE_APPEND=false
+        EOF
diff --git a/fixtures/hdfs/azblob-core-site.xml b/fixtures/hdfs/azblob-core-site.xml
new file mode 100644
index 000000000000..8d3616d43993
--- /dev/null
+++ b/fixtures/hdfs/azblob-core-site.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<configuration>
+  <property>
+    <name>hadoop.security.credential.provider.path</name>
+    <value>localjceks://file/tmp/hdfs/wasb.jceks</value>
+    <description>Path to interrogate for protected azure blob storage credentials.</description>
+  </property>
+</configuration>
diff --git a/fixtures/hdfs/s3-core-site.xml b/fixtures/hdfs/s3-core-site.xml
new file mode 100644
index 000000000000..3b3df43300fb
--- /dev/null
+++ b/fixtures/hdfs/s3-core-site.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+<configuration>
+  <property>
+    <name>hadoop.security.credential.provider.path</name>
+    <value>localjceks://file/tmp/hdfs/s3.jceks</value>
+    <description>Path to interrogate for protected aws s3 storage credentials.</description>
+  </property>
+</configuration>