Merge remote-tracking branch 'origin/main' into refactor-release
Xuanwo committed Feb 18, 2024
2 parents 5edb465 + e668413 commit 07f400c
Showing 19 changed files with 794 additions and 82 deletions.
62 changes: 62 additions & 0 deletions .github/services/hdfs/hdfs_default_gcs/action.yml
@@ -0,0 +1,62 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

name: hdfs_default_gcs
description: 'Behavior test for hdfs default over gcs'

runs:
using: "composite"
steps:
- name: Setup java env
uses: actions/setup-java@v4
with:
distribution: temurin
java-version: "11"
- name: Load secrets
uses: 1password/load-secrets-action@v1
with:
export-env: true
env:
OPENDAL_GCS_ROOT: op://services/gcs/root
OPENDAL_GCS_BUCKET: op://services/gcs/bucket
OPENDAL_GCS_CREDENTIAL: op://services/gcs/credential
- name: Setup
shell: bash
run: |
curl -LsSf https://dlcdn.apache.org/hadoop/common/hadoop-3.3.5/hadoop-3.3.5.tar.gz | tar zxf - -C /home/runner
export HADOOP_HOME="/home/runner/hadoop-3.3.5"
curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/gcs-connector-hadoop3-2.2.19-shaded.jar https://github.com/GoogleCloudDataproc/hadoop-connectors/releases/download/v2.2.19/gcs-connector-hadoop3-2.2.19-shaded.jar
export CLASSPATH=$(${HADOOP_HOME}/bin/hadoop classpath --glob)
cp ./fixtures/hdfs/hdfs-site.xml ${HADOOP_HOME}/etc/hadoop/hdfs-site.xml
cp ./fixtures/hdfs/gcs-core-site.xml ${HADOOP_HOME}/etc/hadoop/core-site.xml
cat << EOF >> $GITHUB_ENV
HADOOP_HOME=${HADOOP_HOME}
CLASSPATH=${CLASSPATH}
LD_LIBRARY_PATH=${JAVA_HOME}/lib/server:${HADOOP_HOME}/lib/native
OPENDAL_HDFS_ROOT=${OPENDAL_GCS_ROOT}
OPENDAL_HDFS_NAME_NODE=gs://${OPENDAL_GCS_BUCKET}
OPENDAL_HDFS_ENABLE_APPEND=false
EOF
mkdir -p /tmp/hdfs
echo ${OPENDAL_GCS_CREDENTIAL} | base64 -d > /tmp/hdfs/gcs-credentials.json
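
The `OPENDAL_HDFS_*` values appended to `$GITHUB_ENV` above are what the behavior tests consume: the name node points at the `gs://` bucket that the GCS connector jar resolves through `core-site.xml`, and the root scopes the test data. Below is a minimal sketch of how such values map onto OpenDAL's HDFS service builder; the real test harness wires these variables itself, so the function name and env handling here are illustrative only.

```rust
use opendal::{services::Hdfs, Operator, Result};

// Illustrative sketch (not the real test harness): map the exported
// OPENDAL_HDFS_NAME_NODE / OPENDAL_HDFS_ROOT values onto the HDFS builder.
// The gs:// name node only resolves because the GCS connector jar and
// gcs-core-site.xml were installed in the step above.
fn hdfs_over_gcs_operator() -> Result<Operator> {
    let mut builder = Hdfs::default();
    builder.name_node(&std::env::var("OPENDAL_HDFS_NAME_NODE").unwrap_or_default());
    builder.root(&std::env::var("OPENDAL_HDFS_ROOT").unwrap_or_default());
    Ok(Operator::new(builder)?.finish())
}
```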
71 changes: 71 additions & 0 deletions .github/services/hdfs/hdfs_default_on_azurite_azblob/action.yml
@@ -0,0 +1,71 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

name: hdfs_default_on_azurite_azblob
description: 'Behavior test for hdfs default over azurite azblob'

runs:
using: "composite"
steps:
- name: Setup java env
uses: actions/setup-java@v4
with:
distribution: temurin
java-version: "11"
- name: Setup azurite azblob service
shell: bash
working-directory: fixtures/azblob
run: docker compose -f docker-compose-azurite.yml up -d --wait
- name: Setup test bucket
shell: bash
run: |
az storage container create \
--name test \
--connection-string "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"
- name: Setup
shell: bash
run: |
OPENDAL_AZBLOB_CONTAINER=test
OPENDAL_AZBLOB_ACCOUNT_NAME=devstoreaccount1
OPENDAL_AZBLOB_ACCOUNT_KEY=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==
curl -LsSf https://dlcdn.apache.org/hadoop/common/hadoop-3.3.5/hadoop-3.3.5.tar.gz | tar zxf - -C /home/runner
export HADOOP_HOME="/home/runner/hadoop-3.3.5"
curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/hadoop-azure-3.3.5.jar https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure/3.3.5/hadoop-azure-3.3.5.jar
curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/azure-storage-7.0.1.jar https://repo1.maven.org/maven2/com/microsoft/azure/azure-storage/7.0.1/azure-storage-7.0.1.jar
export CLASSPATH=$(${HADOOP_HOME}/bin/hadoop classpath --glob)
mkdir -p /tmp/hdfs
pushd ${HADOOP_HOME}
./bin/hadoop credential create fs.azure.account.key.${OPENDAL_AZBLOB_ACCOUNT_NAME}.blob.core.windows.net -value ${OPENDAL_AZBLOB_ACCOUNT_KEY} -provider localjceks://file/tmp/hdfs/azurite-wasb.jceks
popd
cp ./fixtures/hdfs/hdfs-site.xml ${HADOOP_HOME}/etc/hadoop/hdfs-site.xml
cp ./fixtures/hdfs/azurite-azblob-core-site.xml ${HADOOP_HOME}/etc/hadoop/core-site.xml
cat << EOF >> $GITHUB_ENV
HADOOP_HOME=${HADOOP_HOME}
CLASSPATH=${CLASSPATH}
LD_LIBRARY_PATH=${JAVA_HOME}/lib/server:${HADOOP_HOME}/lib/native
OPENDAL_HDFS_ROOT=${OPENDAL_AZBLOB_ROOT}
OPENDAL_HDFS_NAME_NODE=wasb://${OPENDAL_AZBLOB_CONTAINER}@${OPENDAL_AZBLOB_ACCOUNT_NAME}.blob.core.windows.net
OPENDAL_HDFS_ENABLE_APPEND=false
EOF
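
This action drives HDFS through the `wasb://` connector against a local Azurite emulator, storing the well-known `devstoreaccount1` development key in a local JCEKS keystore instead of plain text in `core-site.xml`. For comparison only, here is a hedged sketch (not part of this action) of reaching the same Azurite endpoint directly with OpenDAL's native azblob service:

```rust
use opendal::{services::Azblob, Operator, Result};

// Hedged comparison sketch: OpenDAL's native azblob service pointed at the
// same Azurite endpoint and well-known development credentials that the
// wasb connector above uses. Not part of this composite action.
fn azurite_azblob_operator() -> Result<Operator> {
    let mut builder = Azblob::default();
    builder.container("test");
    builder.endpoint("http://127.0.0.1:10000/devstoreaccount1");
    builder.account_name("devstoreaccount1");
    builder.account_key(
        "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==",
    );
    Ok(Operator::new(builder)?.finish())
}
```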
68 changes: 68 additions & 0 deletions .github/services/hdfs/hdfs_default_on_minio_s3/action.yml
@@ -0,0 +1,68 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

name: hdfs_default_on_minio_s3
description: 'Behavior test for hdfs default on minio s3'

runs:
using: "composite"
steps:
- name: Setup java env
uses: actions/setup-java@v4
with:
distribution: temurin
java-version: "11"
- name: Setup MinIO Server
shell: bash
working-directory: fixtures/s3
run: docker compose -f docker-compose-minio.yml up -d --wait
- name: Setup test bucket
shell: bash
env:
AWS_ACCESS_KEY_ID: "minioadmin"
AWS_SECRET_ACCESS_KEY: "minioadmin"
AWS_EC2_METADATA_DISABLED: "true"
run: aws --endpoint-url http://127.0.0.1:9000/ s3 mb s3://test
- name: Setup
shell: bash
run: |
curl -LsSf https://dlcdn.apache.org/hadoop/common/hadoop-3.3.5/hadoop-3.3.5.tar.gz | tar zxf - -C /home/runner
export HADOOP_HOME="/home/runner/hadoop-3.3.5"
curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/hadoop-aws-3.3.5.jar https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/3.3.5/hadoop-aws-3.3.5.jar
curl -LsSf -o ${HADOOP_HOME}/share/hadoop/common/lib/aws-java-sdk-bundle-1.12.653.jar https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/1.12.653/aws-java-sdk-bundle-1.12.653.jar
export CLASSPATH=$(${HADOOP_HOME}/bin/hadoop classpath --glob)
mkdir -p /tmp/hdfs
pushd ${HADOOP_HOME}
./bin/hadoop credential create fs.s3a.access.key -value minioadmin -provider localjceks://file/tmp/hdfs/s3.jceks
./bin/hadoop credential create fs.s3a.secret.key -value minioadmin -provider localjceks://file/tmp/hdfs/s3.jceks
popd
cp ./fixtures/hdfs/hdfs-site.xml ${HADOOP_HOME}/etc/hadoop/hdfs-site.xml
cp ./fixtures/hdfs/minio-s3-core-site.xml ${HADOOP_HOME}/etc/hadoop/core-site.xml
cat << EOF >> $GITHUB_ENV
HADOOP_HOME=${HADOOP_HOME}
CLASSPATH=${CLASSPATH}
LD_LIBRARY_PATH=${JAVA_HOME}/lib/server:${HADOOP_HOME}/lib/native
OPENDAL_HDFS_NAME_NODE=s3a://test
OPENDAL_HDFS_ENABLE_APPEND=false
EOF
5 changes: 3 additions & 2 deletions .github/workflows/docs.yml
@@ -47,9 +47,10 @@ jobs:
need-protoc: true
github-token: ${{ secrets.GITHUB_TOKEN }}

# Revert to nightly after https://github.com/apache/opendal/issues/4161 addressed
- name: Setup Rust Nightly
run: |
rustup toolchain install nightly
rustup toolchain install nightly-2024-01-01
- uses: actions/setup-java@v4
with:
@@ -58,7 +59,7 @@

- name: Build OpenDAL doc
working-directory: core
run: cargo +nightly doc --lib --no-deps --all-features
run: cargo +nightly-2024-01-01 doc --lib --no-deps --all-features
env:
LD_LIBRARY_PATH: ${{ env.JAVA_HOME }}/lib/server:${{ env.LD_LIBRARY_PATH }}
# Enable cfg docs to make sure docs are built.
28 changes: 14 additions & 14 deletions core/src/layers/dtrace.rs
@@ -194,10 +194,10 @@ impl<A: Accessor> Debug for DTraceAccessor<A> {
#[async_trait]
impl<A: Accessor> LayeredAccessor for DTraceAccessor<A> {
type Inner = A;
type Reader = DtraceLayerWarpper<A::Reader>;
type BlockingReader = DtraceLayerWarpper<A::BlockingReader>;
type Writer = DtraceLayerWarpper<A::Writer>;
type BlockingWriter = DtraceLayerWarpper<A::BlockingWriter>;
type Reader = DtraceLayerWrapper<A::Reader>;
type BlockingReader = DtraceLayerWrapper<A::BlockingReader>;
type Writer = DtraceLayerWrapper<A::Writer>;
type BlockingWriter = DtraceLayerWrapper<A::BlockingWriter>;
type Lister = A::Lister;
type BlockingLister = A::BlockingLister;

@@ -220,7 +220,7 @@ impl<A: Accessor> LayeredAccessor for DTraceAccessor<A> {
.inner
.read(path, args)
.await
.map(|(rp, r)| (rp, DtraceLayerWarpper::new(r, &path.to_string())));
.map(|(rp, r)| (rp, DtraceLayerWrapper::new(r, &path.to_string())));
probe_lazy!(opendal, read_end, c_path.as_ptr());
result
}
@@ -232,7 +232,7 @@ impl<A: Accessor> LayeredAccessor for DTraceAccessor<A> {
.inner
.write(path, args)
.await
.map(|(rp, r)| (rp, DtraceLayerWarpper::new(r, &path.to_string())));
.map(|(rp, r)| (rp, DtraceLayerWrapper::new(r, &path.to_string())));

probe_lazy!(opendal, write_end, c_path.as_ptr());
result
@@ -288,7 +288,7 @@ impl<A: Accessor> LayeredAccessor for DTraceAccessor<A> {
let result = self
.inner
.blocking_read(path, args)
.map(|(rp, r)| (rp, DtraceLayerWarpper::new(r, &path.to_string())));
.map(|(rp, r)| (rp, DtraceLayerWrapper::new(r, &path.to_string())));
probe_lazy!(opendal, blocking_read_end, c_path.as_ptr());
result
}
@@ -299,7 +299,7 @@ impl<A: Accessor> LayeredAccessor for DTraceAccessor<A> {
let result = self
.inner
.blocking_write(path, args)
.map(|(rp, r)| (rp, DtraceLayerWarpper::new(r, &path.to_string())));
.map(|(rp, r)| (rp, DtraceLayerWrapper::new(r, &path.to_string())));
probe_lazy!(opendal, blocking_write_end, c_path.as_ptr());
result
}
@@ -329,12 +329,12 @@ impl<A: Accessor> LayeredAccessor for DTraceAccessor<A> {
}
}

pub struct DtraceLayerWarpper<R> {
pub struct DtraceLayerWrapper<R> {
inner: R,
path: String,
}

impl<R> DtraceLayerWarpper<R> {
impl<R> DtraceLayerWrapper<R> {
pub fn new(inner: R, path: &String) -> Self {
Self {
inner,
@@ -343,7 +343,7 @@ impl<R> DtraceLayerWarpper<R> {
}
}

impl<R: oio::Read> oio::Read for DtraceLayerWarpper<R> {
impl<R: oio::Read> oio::Read for DtraceLayerWrapper<R> {
fn poll_read(&mut self, cx: &mut Context<'_>, buf: &mut [u8]) -> Poll<Result<usize>> {
let c_path = CString::new(self.path.clone()).unwrap();
probe_lazy!(opendal, reader_read_start, c_path.as_ptr());
@@ -393,7 +393,7 @@ impl<R: oio::Read> oio::Read for DtraceLayerWarpper<R> {
}
}

impl<R: oio::BlockingRead> oio::BlockingRead for DtraceLayerWarpper<R> {
impl<R: oio::BlockingRead> oio::BlockingRead for DtraceLayerWrapper<R> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let c_path = CString::new(self.path.clone()).unwrap();
probe_lazy!(opendal, blocking_reader_read_start, c_path.as_ptr());
@@ -445,7 +445,7 @@ impl<R: oio::BlockingRead> oio::BlockingRead for DtraceLayerWarpper<R> {
}
}

impl<R: oio::Write> oio::Write for DtraceLayerWarpper<R> {
impl<R: oio::Write> oio::Write for DtraceLayerWrapper<R> {
fn poll_write(&mut self, cx: &mut Context<'_>, bs: &dyn oio::WriteBuf) -> Poll<Result<usize>> {
let c_path = CString::new(self.path.clone()).unwrap();
probe_lazy!(opendal, writer_write_start, c_path.as_ptr());
@@ -490,7 +490,7 @@ impl<R: oio::Write> oio::Write for DtraceLayerWarpper<R> {
}
}

impl<R: oio::BlockingWrite> oio::BlockingWrite for DtraceLayerWarpper<R> {
impl<R: oio::BlockingWrite> oio::BlockingWrite for DtraceLayerWrapper<R> {
fn write(&mut self, bs: &dyn oio::WriteBuf) -> Result<usize> {
let c_path = CString::new(self.path.clone()).unwrap();
probe_lazy!(opendal, blocking_writer_write_start, c_path.as_ptr());
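
The change in this file is a rename of the misspelled `DtraceLayerWarpper` to `DtraceLayerWrapper`, the type that wraps readers and writers so USDT probes (`reader_read_start`, `writer_write_start`, and friends) fire around every I/O call. A hedged usage sketch follows, assuming the module's public layer type is `DtraceLayer` with a `Default` impl and using the in-memory service only as a stand-in backend:

```rust
use opendal::layers::DtraceLayer;
use opendal::{services::Memory, Operator, Result};

// Hedged sketch: attach the DTrace layer so reads and writes go through the
// renamed DtraceLayerWrapper and emit probes. Backend choice is illustrative.
fn traced_operator() -> Result<Operator> {
    Ok(Operator::new(Memory::default())?
        .layer(DtraceLayer::default())
        .finish())
}
```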
5 changes: 3 additions & 2 deletions core/src/services/azblob/backend.rs
@@ -567,8 +567,9 @@ impl Accessor for AzblobBackend {
read_with_override_content_disposition: true,

write: true,
write_can_empty: true,
write_can_append: true,
write_can_empty: true,
write_can_multi: true,
write_with_cache_control: true,
write_with_content_type: true,

@@ -631,7 +632,7 @@ impl Accessor for AzblobBackend {
let w = if args.append() {
AzblobWriters::Two(oio::AppendWriter::new(w))
} else {
AzblobWriters::One(oio::OneShotWriter::new(w))
AzblobWriters::One(oio::BlockWriter::new(w, args.concurrent()))
};

Ok((RpWrite::default(), w))
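
With `write_can_multi` now advertised and the non-append path switched from `OneShotWriter` to `BlockWriter` driven by `args.concurrent()`, azblob uploads can be staged as multiple blocks and written concurrently instead of in a single shot. A hedged user-side sketch, assuming the writer builder exposes a `concurrent` option as in OpenDAL's write API (the path and concurrency of 4 are illustrative):

```rust
use opendal::{Operator, Result};

// Hedged sketch: ask for a concurrent writer; each write call can become a
// staged block, and close() commits the block list. Values are illustrative.
async fn upload_in_blocks(op: &Operator, chunks: Vec<Vec<u8>>) -> Result<()> {
    let mut w = op.writer_with("big-object").concurrent(4).await?;
    for chunk in chunks {
        w.write(chunk).await?;
    }
    w.close().await?;
    Ok(())
}
```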