Skip to content

Commit

Permalink
add no-op hbase metrics, include more dependencies in hudi jar
Browse files Browse the repository at this point in the history
  • Loading branch information
the-other-tim-brown committed Jan 13, 2025
1 parent 6d7b0dc commit 06ff4e7
Show file tree
Hide file tree
Showing 8 changed files with 224 additions and 9 deletions.
5 changes: 5 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,11 @@
<artifactId>parquet-column</artifactId>
<version>${parquet.version}</version>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
<version>${parquet.version}</version>
</dependency>

<!-- Logging -->
<dependency>
Expand Down
42 changes: 36 additions & 6 deletions xtable-hudi/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,14 @@
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-column</artifactId>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-avro</artifactId>
</dependency>
<dependency>
<groupId>org.apache.parquet</groupId>
<artifactId>parquet-hadoop</artifactId>
</dependency>

<!-- Hudi dependencies -->
<dependency>
Expand All @@ -73,12 +81,6 @@
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-java-client</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
</exclusion>
</exclusions>
</dependency>

<!-- Hadoop dependencies -->
Expand Down Expand Up @@ -181,6 +183,7 @@
<goal>shade</goal>
</goals>
<configuration>
<createSourcesJar>true</createSourcesJar>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"> </transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
Expand All @@ -205,14 +208,30 @@
<include>com.fasterxml.jackson.core:jackson-databind</include>
<include>com.fasterxml.jackson.datatype:jackson-datatype-jsr310</include>
<include>com.fasterxml.jackson.core:jackson-core</include>
<include>com.fasterxml.jackson.core:jackson-annotations</include>
<include>org.apache.parquet:parquet-column</include>
<include>org.apache.parquet:parquet-avro</include>
<include>org.apache.parquet:parquet-common</include>
<include>org.apache.parquet:parquet-encoding</include>
<include>org.apache.parquet:parquet-hadoop</include>
<include>org.apache.parquet:parquet-format-structures</include>
<include>org.apache.hudi:hudi-java-client</include>
<include>org.apache.hudi:hudi-client-common</include>
<include>org.apache.hudi:hudi-common</include>
<include>org.apache.avro:avro</include>
<include>com.google.guava:guava</include>
<!-- hbase -->
<include>org.apache.hbase:hbase-client</include>
<include>org.apache.hbase:hbase-common</include>
<include>org.apache.hbase:hbase-hadoop-compat</include>
<include>org.apache.hbase:hbase-metrics</include>
<include>org.apache.hbase:hbase-metrics-api</include>
<include>org.apache.hbase:hbase-protocol</include>
<include>org.apache.hbase:hbase-protocol-shaded</include>
<include>org.apache.hbase:hbase-server</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-miscellaneous</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-netty</include>
<include>org.apache.hbase.thirdparty:hbase-shaded-protobuf</include>
</includes>
</artifactSet>
<relocations combine.children="append">
Expand All @@ -223,6 +242,17 @@
<relocation>
<pattern>org.apache.hudi.</pattern>
<shadedPattern>org.apache.xtable.shade.org.apache.hudi.</shadedPattern>
<excludes>
<exclude>org.apache.hudi.io.storage.HoodieHBaseKVComparator</exclude>
</excludes>
</relocation>
<relocation>
<pattern>org.apache.hadoop.hbase.</pattern>
<shadedPattern>org.apache.xtable.shade.org.apache.hadoop.hbase.</shadedPattern>
<excludes>
<exclude>org.apache.hadoop.hbase.CellComparator</exclude>
<exclude>org.apache.hadoop.hbase.CellComparatorImpl</exclude>
</excludes>
</relocation>
<relocation>
<pattern>org.apache.avro.</pattern>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.xtable.hbase;

import org.apache.hadoop.hbase.io.MetricsIOSource;
import org.apache.hadoop.hbase.io.MetricsIOWrapper;
import org.apache.hadoop.hbase.regionserver.MetricsHeapMemoryManagerSource;
import org.apache.hadoop.hbase.regionserver.MetricsRegionServerSource;
import org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactory;
import org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper;
import org.apache.hadoop.hbase.regionserver.MetricsRegionSource;
import org.apache.hadoop.hbase.regionserver.MetricsRegionWrapper;
import org.apache.hadoop.hbase.regionserver.MetricsTableAggregateSource;
import org.apache.hadoop.hbase.regionserver.MetricsTableSource;
import org.apache.hadoop.hbase.regionserver.MetricsTableWrapperAggregate;
import org.apache.hadoop.hbase.regionserver.MetricsUserAggregateSource;
import org.apache.hadoop.hbase.regionserver.MetricsUserSource;

/** Provides a No-Op metrics implementation for the HFile required by Hudi. */
/**
 * A {@link MetricsRegionServerSourceFactory} whose products do nothing.
 *
 * <p>Hudi's HFile read path instantiates HBase metrics sources via this factory (registered
 * through {@code META-INF/services}); since XTable never runs a region server, no real metrics
 * backend is available, so this implementation supplies inert stand-ins instead.
 *
 * <p>NOTE(review): all factory methods except {@link #createIO} return {@code null} — this
 * assumes the Hudi HFile code path only ever consumes the IO source; confirm callers
 * null-check the remaining sources before relying on this class elsewhere.
 */
public class NoOpMetricsRegionServerSourceFactory implements MetricsRegionServerSourceFactory {

  /** Unused by the HFile path; intentionally absent. */
  @Override
  public MetricsRegionServerSource createServer(MetricsRegionServerWrapper regionServerWrapper) {
    return null;
  }

  /** Unused by the HFile path; intentionally absent. */
  @Override
  public MetricsRegionSource createRegion(MetricsRegionWrapper wrapper) {
    return null;
  }

  /** Unused by the HFile path; intentionally absent. */
  @Override
  public MetricsUserSource createUser(String shortUserName) {
    return null;
  }

  /** Unused by the HFile path; intentionally absent. */
  @Override
  public MetricsUserAggregateSource getUserAggregate() {
    return null;
  }

  /** Unused by the HFile path; intentionally absent. */
  @Override
  public MetricsTableSource createTable(String table, MetricsTableWrapperAggregate wrapper) {
    return null;
  }

  /** Unused by the HFile path; intentionally absent. */
  @Override
  public MetricsTableAggregateSource getTableAggregate() {
    return null;
  }

  /** Unused by the HFile path; intentionally absent. */
  @Override
  public MetricsHeapMemoryManagerSource getHeapMemoryManager() {
    return null;
  }

  /**
   * Returns the one source the HFile reader actually exercises — a fully inert
   * implementation that records nothing.
   */
  @Override
  public MetricsIOSource createIO(MetricsIOWrapper wrapper) {
    return new NoOpIOSource();
  }

  /**
   * {@link MetricsIOSource} implementation where every mutator is a no-op and every
   * accessor returns an empty string. Stateless, hence trivially thread-safe.
   */
  private static final class NoOpIOSource implements MetricsIOSource {

    // --- IO timing hooks: discard all samples. ---

    @Override
    public void updateFsReadTime(long t) {}

    @Override
    public void updateFsPReadTime(long t) {}

    @Override
    public void updateFsWriteTime(long t) {}

    // --- Generic BaseSource mutators: discard all updates. ---

    @Override
    public void init() {}

    @Override
    public void setGauge(String gaugeName, long value) {}

    @Override
    public void incGauge(String gaugeName, long delta) {}

    @Override
    public void decGauge(String gaugeName, long delta) {}

    @Override
    public void removeMetric(String key) {}

    @Override
    public void incCounters(String counterName, long delta) {}

    @Override
    public void updateHistogram(String name, long value) {}

    // --- Descriptive accessors: report empty identifiers. ---

    @Override
    public String getMetricsContext() {
      return "";
    }

    @Override
    public String getMetricsDescription() {
      return "";
    }

    @Override
    public String getMetricsJmxContext() {
      return "";
    }

    @Override
    public String getMetricsName() {
      return "";
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
org.apache.xtable.hbase.NoOpMetricsRegionServerSourceFactory
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
org.apache.xtable.hbase.NoOpMetricsRegionServerSourceFactory
15 changes: 15 additions & 0 deletions xtable-integration-tests/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,18 @@
<artifactId>hudi-spark${spark.version.prefix}-bundle_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hudi</groupId>
<artifactId>hudi-java-client</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
</exclusion>
</exclusions>
<scope>test</scope>
</dependency>

<!-- Iceberg dependencies -->
<dependency>
<groupId>org.apache.iceberg</groupId>
Expand Down Expand Up @@ -119,18 +131,21 @@
<groupId>org.apache.xtable</groupId>
<artifactId>xtable-delta_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<classifier>bundled</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.xtable</groupId>
<artifactId>xtable-iceberg</artifactId>
<version>${project.version}</version>
<classifier>bundled</classifier>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.xtable</groupId>
<artifactId>xtable-hudi</artifactId>
<version>${project.version}</version>
<classifier>bundled</classifier>
<scope>test</scope>
</dependency>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,9 +70,7 @@
import org.apache.hudi.common.model.HoodieAvroPayload;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.HoodieInstant;

import org.apache.iceberg.Snapshot;
import org.apache.iceberg.Table;
import org.apache.iceberg.hadoop.HadoopTables;

Expand All @@ -98,6 +96,8 @@
import org.apache.xtable.iceberg.IcebergConversionSourceProvider;
import org.apache.xtable.model.storage.TableFormat;
import org.apache.xtable.model.sync.SyncMode;
import org.apache.xtable.shade.org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.xtable.shade.org.apache.iceberg.Snapshot;

public class ITConversionController {
@TempDir public static Path tempDir;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@
import org.junit.jupiter.api.io.TempDir;

import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.config.HoodieArchivalConfig;

import org.apache.xtable.GenericTable;
Expand All @@ -47,6 +46,7 @@
import org.apache.xtable.hudi.TestJavaHudiTable;
import org.apache.xtable.model.storage.TableFormat;
import org.apache.xtable.model.sync.SyncMode;
import org.apache.xtable.shade.org.apache.hudi.common.table.timeline.HoodieInstant;

/**
* Tests that can be run manually to simulate lots of commits/partitions/files/etc. to understand
Expand Down

0 comments on commit 06ff4e7

Please sign in to comment.