
Commit 13446c1 (dev)
featzhang committed Apr 23, 2024
1 parent 317d65d commit 13446c1
Showing 18 changed files with 26 additions and 26 deletions.

@@ -33,7 +33,7 @@
@Data
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
-@JsonTypeDefine(value = DataNodeType.HUDI)
+@JsonTypeDefine(value = DataNodeType.PAIMON)
@ApiModel("Paimon data node info")
public class PaimonDataNodeInfo extends DataNodeInfo {

@@ -44,7 +44,7 @@ public class PaimonDataNodeInfo extends DataNodeInfo {
private String warehouse;

public PaimonDataNodeInfo() {
-this.setType(DataNodeType.HUDI);
+this.setType(DataNodeType.PAIMON);
}

@Override
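The constant fixed in these two hunks is what ties the concrete class to its polymorphic type name: @JsonTypeDefine registers the subtype under a type string, and the constructor pre-fills the same string on the object's type field, so both must say PAIMON. A minimal, self-contained sketch of that resolution pattern, using hypothetical class and registry names rather than InLong's actual code:

import java.util.Map;

// Illustrative sketch only; the registry and class names below are hypothetical.
public class SubtypeResolutionSketch {

    // Stand-ins for the concrete info classes registered via @JsonTypeDefine.
    static class PaimonDataNodeInfo { }
    static class HiveDataNodeInfo { }

    // The annotation value is the lookup key; the constructor must set the same
    // key on the "type" field so serialized objects map back to the right class.
    private static final Map<String, Class<?>> SUBTYPES = Map.of(
            "PAIMON", PaimonDataNodeInfo.class,
            "HIVE", HiveDataNodeInfo.class);

    static Class<?> resolve(String type) {
        Class<?> clazz = SUBTYPES.get(type);
        if (clazz == null) {
            throw new IllegalArgumentException("unsupported data node type: " + type);
        }
        return clazz;
    }

    public static void main(String[] args) {
        // Before this commit, PaimonDataNodeInfo declared itself under HUDI,
        // so a request carrying type=PAIMON could not resolve to it.
        System.out.println(resolve("PAIMON").getSimpleName());
    }
}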

@@ -32,7 +32,7 @@
@Data
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
-@JsonTypeDefine(value = DataNodeType.HUDI)
+@JsonTypeDefine(value = DataNodeType.PAIMON)
@ApiModel("Paimon data node request")
public class PaimonDataNodeRequest extends DataNodeRequest {

@@ -43,7 +43,7 @@ public class PaimonDataNodeRequest extends DataNodeRequest {
private String warehouse;

public PaimonDataNodeRequest() {
-this.setType(DataNodeType.HUDI);
+this.setType(DataNodeType.PAIMON);
}

}

@@ -39,7 +39,7 @@
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
@ApiModel(value = "Paimon sink info")
-@JsonTypeDefine(value = SinkType.HUDI)
+@JsonTypeDefine(value = SinkType.PAIMON)
public class PaimonSink extends StreamSink {

@ApiModelProperty("Catalog type, like: HIVE, HADOOP, default is HIVE")
@@ -77,7 +77,7 @@ public class PaimonSink extends StreamSink {
private String partitionKey;

public PaimonSink() {
-this.setSinkType(SinkType.HUDI);
+this.setSinkType(SinkType.PAIMON);
}

@Override

@@ -37,7 +37,7 @@
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
@ApiModel(value = "Paimon sink request")
-@JsonTypeDefine(value = SinkType.HUDI)
+@JsonTypeDefine(value = SinkType.PAIMON)
public class PaimonSinkRequest extends SinkRequest {

@ApiModelProperty("Catalog type, like: HIVE, HADOOP, default is HIVE")

@@ -59,6 +59,6 @@ public static PaimonType forType(String type) {
return ibType;
}
}
-throw new IllegalArgumentException(String.format("invalid hudi type = %s", type));
+throw new IllegalArgumentException(String.format("invalid paimon type = %s", type));
}
}
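Only the error message changes in forType, but the lookup pattern is the same before and after: scan the enum constants and reject anything unknown. A compact, self-contained sketch of that pattern (the constants below are placeholders, since the full PaimonType enum is not shown in this diff):

// Placeholder enum illustrating the forType lookup pattern shown above.
public enum PaimonTypeSketch {

    INT, LONG, STRING, BOOLEAN;

    public static PaimonTypeSketch forType(String type) {
        for (PaimonTypeSketch candidate : values()) {
            if (candidate.name().equalsIgnoreCase(type)) {
                return candidate;
            }
        }
        throw new IllegalArgumentException(String.format("invalid paimon type = %s", type));
    }

    public static void main(String[] args) {
        System.out.println(forType("string"));   // STRING
        System.out.println(forType("decimal"));  // throws IllegalArgumentException
    }
}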

@@ -42,7 +42,7 @@ public class PaimonProvider implements ExtractNodeProvider, LoadNodeProvider {

@Override
public Boolean accept(String streamType) {
-return StreamType.HUDI.equals(streamType);
+return StreamType.PAIMON.equals(streamType);
}

@Override
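accept(String streamType) is how the manager picks a provider: each registered ExtractNodeProvider/LoadNodeProvider is asked whether it handles the stream type, so comparing against the wrong constant quietly leaves Paimon streams without a provider. A rough, self-contained sketch of that selection loop, with placeholder types rather than InLong's actual factory code:

import java.util.List;
import java.util.Optional;

// Hypothetical sketch of provider selection by stream type.
public class ProviderSelectionSketch {

    interface NodeProvider {
        boolean accept(String streamType);
    }

    static class PaimonProvider implements NodeProvider {
        @Override
        public boolean accept(String streamType) {
            // Mirrors the fix above: compare against PAIMON, not HUDI.
            return "PAIMON".equals(streamType);
        }
    }

    static Optional<NodeProvider> select(List<NodeProvider> providers, String streamType) {
        return providers.stream().filter(p -> p.accept(streamType)).findFirst();
    }

    public static void main(String[] args) {
        List<NodeProvider> providers = List.of(new PaimonProvider());
        System.out.println(select(providers, "PAIMON").isPresent()); // true
        System.out.println(select(providers, "HUDI").isPresent());   // false
    }
}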

@@ -42,7 +42,7 @@
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
@ApiModel(value = "Paimon source info")
-@JsonTypeDefine(value = SourceType.HUDI)
+@JsonTypeDefine(value = SourceType.PAIMON)
public class PaimonSource extends StreamSource {

@ApiModelProperty("The database name of Paimon")
@@ -70,7 +70,7 @@ public class PaimonSource extends StreamSource {
private List<HashMap<String, String>> extList;

public PaimonSource() {
-this.setSourceType(SourceType.HUDI);
+this.setSourceType(SourceType.PAIMON);
}

@Override

@@ -36,7 +36,7 @@
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
@ApiModel(value = "Request of the Paimon source")
-@JsonTypeDefine(value = SourceType.HUDI)
+@JsonTypeDefine(value = SourceType.PAIMON)
public class PaimonSourceRequest extends SourceRequest {

@ApiModelProperty("The database name of Paimon")
@@ -61,7 +61,7 @@ public class PaimonSourceRequest extends SourceRequest {
private List<HashMap<String, String>> extList;

public PaimonSourceRequest() {
-this.setSourceType(SourceType.HUDI);
+this.setSourceType(SourceType.PAIMON);
}

}

@@ -52,7 +52,7 @@ public Boolean accept(String dataNodeType) {

@Override
public String getDataNodeType() {
-return DataNodeType.HUDI;
+return DataNodeType.PAIMON;
}

@Override

@@ -18,12 +18,12 @@
package org.apache.inlong.manager.service.resource.sink.paimon;

/**
-* Hudi file format.
+* Paimon file format.
*/
public enum PaimonFileFormat {

PARQUET,
-HUDI_LOG,
+PAIMON_LOG,
HFILE,
ORC;


@@ -65,7 +65,7 @@ public class PaimonResourceOperator implements SinkResourceOperator {

@Override
public Boolean accept(String sinkType) {
-return SinkType.HUDI.equals(sinkType);
+return SinkType.PAIMON.equals(sinkType);
}

/**

@@ -35,7 +35,7 @@ public class PaimonUtils {

public static final String IS_QUERY_AS_RO_TABLE = "Paimon.query.as.ro.table";

-private static final Set<String> HUDI_METADATA_FILES =
+private static final Set<String> PAIMON_METADATA_FILES =
Sets.newHashSet(COMMIT_TIME_METADATA_FILE_NAME, COMMIT_SEQNO_METADATA_FILE_NAME,
RECORD_KEY_METADATA_FILE_NAME, PARTITION_PATH_METADATA_FILE_NAME, METADATA_FILE_NAME,
OPERATION_METADATA_FILE_NAME);
@@ -57,7 +57,7 @@ public class PaimonUtils {
* Check if metadata file.
*/
public static boolean isMetadataFile(String fileName) {
-return HUDI_METADATA_FILES.contains(fileName);
+return PAIMON_METADATA_FILES.contains(fileName);
}

/**
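The renamed set only feeds isMetadataFile, which answers whether a file name belongs to the table's metadata rather than its data. A small self-contained sketch of the same check (the file names below are made up, not the real constant values):

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

// Self-contained sketch of the metadata-file check; file names are hypothetical.
public class MetadataFileFilterSketch {

    private static final Set<String> PAIMON_METADATA_FILES =
            Set.of("_commit_time", "_record_key");

    static boolean isMetadataFile(String fileName) {
        return PAIMON_METADATA_FILES.contains(fileName);
    }

    public static void main(String[] args) {
        List<String> files = List.of("part-0001.parquet", "_commit_time", "data-0002.orc");

        // Keep only the regular data files, skipping table metadata.
        List<String> dataFiles = files.stream()
                .filter(name -> !isMetadataFile(name))
                .collect(Collectors.toList());

        System.out.println(dataFiles); // [part-0001.parquet, data-0002.orc]
    }
}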

@@ -52,7 +52,7 @@ public Integer saveSink(String sinkName) {
PaimonSinkRequest sinkInfo = new PaimonSinkRequest();
sinkInfo.setInlongGroupId(globalGroupId);
sinkInfo.setInlongStreamId(globalStreamId);
-sinkInfo.setSinkType(SinkType.HUDI);
+sinkInfo.setSinkType(SinkType.PAIMON);
sinkInfo.setEnableCreateResource(InlongConstants.DISABLE_CREATE_RESOURCE);
sinkInfo.setDataPath("hdfs://127.0.0.1:8020/data");
sinkInfo.setSinkName(sinkName);

@@ -18,7 +18,7 @@
package org.apache.inlong.sort.protocol.constant;

/**
-* Hudi option constant
+* PAIMON option constant
*/
public class PaimonConstant {

@@ -142,7 +142,7 @@ public class PaimonConstant {
public static final String READ_STREAMING_SKIP_COMPACT = "read.streaming.skip_compaction";

/**
-* Hudi supported catalog type
+* PAIMON supported catalog type
*/
public enum CatalogType {

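PaimonConstant collects the raw option keys used by the connector; for context, a key such as read.streaming.skip_compaction would typically be exposed as a typed Flink ConfigOption along these lines (a sketch only, with an assumed default and description, not InLong's actual option definition):

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;

// Sketch only: how a string key like READ_STREAMING_SKIP_COMPACT could be
// wrapped as a typed Flink option; default value and description are assumptions.
public class PaimonOptionSketch {

    public static final ConfigOption<Boolean> READ_STREAMING_SKIP_COMPACT =
            ConfigOptions.key("read.streaming.skip_compaction")
                    .booleanType()
                    .defaultValue(false)
                    .withDescription("Whether streaming reads skip compaction commits.");
}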

@@ -79,7 +79,7 @@
<configuration>
<artifactSet>
<includes>
-<include>org.apache.paimon:*</include>
+<include>org.apache.hudi:*</include>
<include>org.apache.hive:hive-exec</include>
<include>org.apache.hadoop:*</include>
<include>com.fasterxml.woodstox:*</include>

@@ -15,7 +15,7 @@
* limitations under the License.
*/

-package org.apache.inlong.sort.paimon.table.sink;
+package org.apache.inlong.sort.hudi.table.sink;

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.table.connector.sink.DynamicTableSink;

@@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-org.apache.inlong.sort.paimon.table.sink.HudiTableInlongFactory
+org.apache.inlong.sort.hudi.table.sink.HudiTableInlongFactory

@@ -79,7 +79,7 @@
<configuration>
<artifactSet>
<includes>
-<include>org.apache.hudi:*</include>
+<include>org.apache.paimon:*</include>
<include>org.apache.hive:hive-exec</include>
<include>org.apache.hadoop:*</include>
<include>com.fasterxml.woodstox:*</include>
