[Fix-2392][metadata] Fix listColumnsSortByPK infinite recursion (#2404)
Co-authored-by: wenmo <[email protected]>
aiwenmo and aiwenmo authored Oct 18, 2023
1 parent 4962416 commit 10ca0e2
Showing 4 changed files with 30 additions and 27 deletions.
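
Why this fix: before this commit, the shared listColumnsSortByPK implementation called the overridable listColumns, while the Doris and StarRocks drivers overrode listColumns to delegate back to listColumnsSortByPK, so on those drivers either call re-entered the other until the stack overflowed. The commit removes listColumnsSortByPK from the driver base implementation and from the Driver interface, and has each override fetch columns via super.listColumns before sorting. Below is a minimal, self-contained sketch of the broken shape and of the fixed shape; the class and method signatures are illustrative stand-ins, not the project's real ones (the real methods take a schema name and a table name and return com.dlink.model.Column):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

// Illustrative stand-ins: the real classes are AbstractJdbcDriver / DorisDriver and
// their methods take (schemaName, tableName) and return com.dlink.model.Column.
class Col {
    final String name;
    final boolean keyFlag;

    Col(String name, boolean keyFlag) {
        this.name = name;
        this.keyFlag = keyFlag;
    }

    boolean isKeyFlag() {
        return keyFlag;
    }
}

class BaseDriver {
    // Stand-in for the JDBC metadata query in the base listColumns
    List<Col> listColumns() {
        return new ArrayList<>(List.of(new Col("value", false), new Col("id", true)));
    }

    // The removed helper: note that it calls the overridable listColumns()
    List<Col> listColumnsSortByPK() {
        List<Col> columnList = listColumns(); // dispatches to the subclass override
        columnList.sort(Comparator.comparing(Col::isKeyFlag).reversed());
        return columnList;
    }
}

class BrokenDorisLikeDriver extends BaseDriver {
    @Override
    List<Col> listColumns() {
        // Old override: delegates back to the helper, which calls listColumns() again -> infinite recursion
        return listColumnsSortByPK();
    }
}

class FixedDorisLikeDriver extends BaseDriver {
    @Override
    List<Col> listColumns() {
        // The commit's pattern: fetch via super, then sort locally
        List<Col> columnList = super.listColumns();
        columnList.sort(Comparator.comparing(Col::isKeyFlag).reversed());
        return columnList;
    }
}

public class RecursionSketch {
    public static void main(String[] args) {
        System.out.println(new FixedDorisLikeDriver().listColumns().get(0).name); // prints "id": key column first
        try {
            new BrokenDorisLikeDriver().listColumns();
        } catch (StackOverflowError expected) {
            System.out.println("old wiring recursed until the stack overflowed");
        }
    }
}
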
@@ -385,13 +385,6 @@ public List<Column> listColumns(String schemaName, String tableName) {
         return columns;
     }

-    @Override
-    public List<Column> listColumnsSortByPK(String schemaName, String tableName) {
-        List<Column> columnList = listColumns(schemaName, tableName);
-        columnList.sort(Comparator.comparing(Column::isKeyFlag).reversed());
-        return columnList;
-    }
-
     @Override
     public boolean createTable(Table table) throws Exception {
         String sql = getCreateTableSql(table).replaceAll("\r\n", " ");
@@ -153,8 +153,6 @@ static Driver buildNewConnection(String connector, String url, String username,

     List<Column> listColumns(String schemaName, String tableName);

-    List<Column> listColumnsSortByPK(String schemaName, String tableName);
-
     List<Schema> getSchemasAndTables();

     List<Table> getTablesAndColumns(String schemaName);
@@ -19,13 +19,6 @@

 package com.dlink.metadata.driver;

-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import cn.hutool.core.text.CharSequenceUtil;
 import com.dlink.metadata.convert.DorisTypeConvert;
 import com.dlink.metadata.convert.ITypeConvert;
 import com.dlink.metadata.query.DorisQuery;
@@ -38,6 +31,15 @@
 import com.dlink.utils.LogUtil;
 import com.dlink.utils.SqlUtil;

+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import cn.hutool.core.text.CharSequenceUtil;
+
 public class DorisDriver extends AbstractJdbcDriver {

     @Override
@@ -133,11 +135,13 @@ public String generateCreateTableSql(Table table) {

     @Override
     public String getCreateTableSql(Table table) {
-        List<String> dorisTypes = Arrays.asList("BOOLEAN", "TINYINT", "SMALLINT", "SMALLINT", "INT", "BIGINT", "LARGEINT", "FLOAT", "DOUBLE", "DECIMAL", "DATE", "DATETIME", "CHAR", "VARCHAR", "TEXT", "TIMESTAMP", "STRING");
+        List<String> dorisTypes = Arrays.asList("BOOLEAN", "TINYINT", "SMALLINT", "SMALLINT", "INT", "BIGINT",
+                "LARGEINT", "FLOAT", "DOUBLE", "DECIMAL", "DATE", "DATETIME", "CHAR", "VARCHAR", "TEXT", "TIMESTAMP",
+                "STRING");
         StringBuilder keyBuffer = new StringBuilder();
         StringBuilder ddlBuffer = new StringBuilder();
         ddlBuffer.append("CREATE TABLE IF NOT EXISTS ").append(table.getSchema()).append(".").append(table.getName())
-            .append(" (").append(System.lineSeparator());
+                .append(" (").append(System.lineSeparator());
         for (int i = 0; i < table.getColumns().size(); i++) {
             Column columnInfo = table.getColumns().get(i);
             String cType = columnInfo.getType().split(" ")[0].toUpperCase();
@@ -146,27 +150,29 @@ public String getCreateTableSql(Table table) {
                 cType = columnInfo.getJavaType().getFlinkType().toUpperCase();
                 if (!dorisTypes.contains(cType)) {
                     logger.error("doris does not support {} type", columnInfo.getType());
-                    return "";
+                    return "";
                 }
             }
             if (cType.equalsIgnoreCase("TIMESTAMP")) {
                 ddlBuffer.append("DATETIME");
             } else if (columnInfo.getType().equalsIgnoreCase("TEXT")) {
                 ddlBuffer.append("STRING");
-            }else{
+            } else {
                 ddlBuffer.append(cType);
             }
-            if(columnInfo.getLength()!=null &&columnInfo.getLength()>0 ) {
-                ddlBuffer.append("(").append(cType.equalsIgnoreCase("VARCHAR") ?columnInfo.getLength() * 3 : columnInfo.getLength()).append(")");
+            if (columnInfo.getLength() != null && columnInfo.getLength() > 0) {
+                ddlBuffer.append("(")
+                        .append(cType.equalsIgnoreCase("VARCHAR") ? columnInfo.getLength() * 3 : columnInfo.getLength())
+                        .append(")");
             }
-            if (columnInfo.getComment()!=null) {
+            if (columnInfo.getComment() != null) {
                 ddlBuffer.append(" COMMENT '").append(columnInfo.getComment()).append("'");
             }
             if (i < table.getColumns().size() - 1) {
                 ddlBuffer.append(",");
             }
             ddlBuffer.append(System.lineSeparator());
-            if(columnInfo.isKeyFlag()){
+            if (columnInfo.isKeyFlag()) {
                 keyBuffer.append(columnInfo.getName()).append(",");
             }

@@ -175,7 +181,8 @@ public String getCreateTableSql(Table table) {
         String primaryKeys = keyBuffer.substring(0, keyBuffer.length() - 1);
         ddlBuffer.append(") UNIQUE KEY (").append(primaryKeys).append(")").append(System.lineSeparator());
         ddlBuffer.append("COMMENT '").append(table.getComment()).append("'");
-        ddlBuffer.append(" DISTRIBUTED BY HASH (").append(primaryKeys).append(") BUCKETS AUTO").append(System.lineSeparator());
+        ddlBuffer.append(" DISTRIBUTED BY HASH (").append(primaryKeys).append(") BUCKETS AUTO")
+                .append(System.lineSeparator());
         //
         ddlBuffer.append(" PROPERTIES ( \"replication_allocation\" = \"tag.location.default: 3\")");
         return ddlBuffer.toString();
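
For reference, the re-wrapped appends above are the tail of getCreateTableSql: during the column loop every key column's name is pushed into keyBuffer followed by a comma, and the substring call then trims the trailing comma so the same name list can be reused in both the UNIQUE KEY and the DISTRIBUTED BY HASH clauses. A standalone sketch of just that assembly, with made-up schema, table, and key-column names and the column-definition section elided:

import java.util.Arrays;
import java.util.List;

public class DorisDdlTailSketch {
    public static void main(String[] args) {
        // Hypothetical inputs; in DorisDriver these come from the Table/Column metadata.
        String schema = "demo";
        String table = "orders";
        String tableComment = "orders table";
        List<String> keyColumns = Arrays.asList("id", "tenant_id");

        // keyBuffer is filled inside the column loop: name + "," for every key column.
        StringBuilder keyBuffer = new StringBuilder();
        for (String key : keyColumns) {
            keyBuffer.append(key).append(",");
        }

        StringBuilder ddlBuffer = new StringBuilder();
        ddlBuffer.append("CREATE TABLE IF NOT EXISTS ").append(schema).append(".").append(table)
                .append(" (").append(System.lineSeparator());
        // ... column definitions are appended here in the real method ...

        // Trailing comma is dropped before the key names are reused twice.
        String primaryKeys = keyBuffer.substring(0, keyBuffer.length() - 1);
        ddlBuffer.append(") UNIQUE KEY (").append(primaryKeys).append(")").append(System.lineSeparator());
        ddlBuffer.append("COMMENT '").append(tableComment).append("'");
        ddlBuffer.append(" DISTRIBUTED BY HASH (").append(primaryKeys).append(") BUCKETS AUTO")
                .append(System.lineSeparator());
        ddlBuffer.append(" PROPERTIES ( \"replication_allocation\" = \"tag.location.default: 3\")");

        System.out.println(ddlBuffer);
    }
}
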
@@ -184,6 +191,8 @@ public String getCreateTableSql(Table table) {
     @Override
     public List<Column> listColumns(String schemaName, String tableName) {
         // In Doris, the columns declared as Key (there may be several) must be declared first, in order, in the CREATE TABLE statement, so re-sort the columns so the Key columns come first
-        return listColumnsSortByPK(schemaName, tableName);
+        List<Column> columnList = super.listColumns(schemaName, tableName);
+        columnList.sort(Comparator.comparing(Column::isKeyFlag).reversed());
+        return columnList;
     }
 }
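
A note on the replacement sort line used in both drivers: Comparator.comparing(Column::isKeyFlag) orders Boolean false before true, so the .reversed() is what moves key columns to the front, and List.sort is stable, so the key columns keep their original relative order, which is what the comment above requires for the CREATE TABLE statement. A tiny standalone illustration with a simplified stand-in for the Column class and made-up column names:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class KeyFirstOrderSketch {
    // Simplified stand-in for com.dlink.model.Column
    record Col(String name, boolean isKeyFlag) {}

    public static void main(String[] args) {
        List<Col> columns = new ArrayList<>(List.of(
                new Col("payload", false),
                new Col("tenant_id", true),
                new Col("updated_at", false),
                new Col("id", true)));

        // false < true for Boolean, so reversed() puts key columns first;
        // List.sort is stable, so tenant_id stays ahead of id among the keys.
        columns.sort(Comparator.comparing(Col::isKeyFlag).reversed());

        columns.forEach(c -> System.out.println(c.name() + "  key=" + c.isKeyFlag()));
        // tenant_id  key=true
        // id  key=true
        // payload  key=false
        // updated_at  key=false
    }
}
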
@@ -29,6 +29,7 @@
 import com.dlink.utils.SqlUtil;

 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -112,6 +113,8 @@ public Map<String, String> getFlinkColumnTypeConversion() {
     @Override
     public List<Column> listColumns(String schemaName, String tableName) {
         // In StarRocks, the columns declared as Key (there may be several) must be declared first, in order, in the CREATE TABLE statement, so re-sort the columns so the Key columns come first
-        return listColumnsSortByPK(schemaName, tableName);
+        List<Column> columnList = super.listColumns(schemaName, tableName);
+        columnList.sort(Comparator.comparing(Column::isKeyFlag).reversed());
+        return columnList;
     }
 }
